am bc667f1c: am 08f336ea: fix regression from Change-Id: I88c1e8192f1cbb5373af592b9e9f9a04ffe6fc7c that causes anchors in most docs other than reference to stop working
automerge: 36b1de7

* commit '36b1de7334f82a7df3728597f0437aedb02dccef':
diff --git a/.gitignore b/.gitignore
index 0d20b64..c9b568f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,2 @@
 *.pyc
+*.swp
diff --git a/CleanSpec.mk b/CleanSpec.mk
index aebf0f9..f348692 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -200,6 +200,12 @@
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/SprintDM.apk)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/etc/omadm)
 
+# GCC 4.8
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/STATIC_LIBRARIES)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/SHARED_LIBRARIES)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/EXECUTABLES)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/lib/*.o)
+
 # KLP I mean KitKat now API 19.
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
@@ -223,17 +229,26 @@
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
 
+# L development
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
+
+# L development
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
+
 # Add ro.product.cpu.abilist{32,64} to build.prop.
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+
+# Unset TARGET_PREFER_32_BIT_APPS for 64 bit targets.
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
 
 # Adding dalvik.vm.dex2oat-flags to eng builds
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
 
-# 4.4.4 (KKWT)
-$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
-$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
-$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
-
 # Unset TARGET_PREFER_32_BIT_APPS for 64 bit targets.
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
 
@@ -250,6 +265,14 @@
 # Switching to 32-bit-by-default host multilib build
 $(call add-clean-step, rm -rf $(HOST_OUT_INTERMEDIATES))
 
+# KKWT has become API 20
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
+
+# ims-common.jar added to BOOTCLASSPATH
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/ETC/init.environ.rc_intermediates)
+
 # Change ro.zygote for core_64_bit.mk from zygote32_64 to zygote64_32
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/root/default.prop)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/recovery/root/default.prop)
@@ -258,6 +281,24 @@
 # dalvik.vm.image-dex2oat-Xms, and dalvik.vm.image-dex2oat-Xmx
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/root/default.prop)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/recovery/root/default.prop)
+
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system)
+
+# Adding dalvik.vm.dex2oat-filter
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/root/default.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/recovery/root/default.prop)
+
+# API 21?
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
+
+# API 21!
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/core/Makefile b/core/Makefile
index 9a0fd7c..b08ad1b 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -22,7 +22,7 @@
 # src:dest pair is the first one to match the same dest"
 #$(1): the src:dest pair
 define check-product-copy-files
-$(if $(filter %.apk, $(1)),$(error \
+$(if $(filter %.apk, $(call word-colon, 2, $(1))),$(error \
     Prebuilt apk found in PRODUCT_COPY_FILES: $(1), use BUILD_PREBUILT instead!))
 endef
 # filter out the duplicate <source file>:<dest file> pairs.
@@ -109,7 +109,7 @@
 build_desc := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT) $(PLATFORM_VERSION) $(BUILD_ID) $(BUILD_NUMBER) $(BUILD_VERSION_TAGS)
 $(INSTALLED_BUILD_PROP_TARGET): PRIVATE_BUILD_DESC := $(build_desc)
 
-# The string used to uniquely identify this build;  used by the OTA server.
+# The string used to uniquely identify the combined build and product; used by the OTA server.
 ifeq (,$(strip $(BUILD_FINGERPRINT)))
   ifneq ($(filter eng.%,$(BUILD_NUMBER)),)
     # Trim down BUILD_FINGERPRINT: the default BUILD_NUMBER makes it easily exceed
@@ -124,6 +124,22 @@
   $(error BUILD_FINGERPRINT cannot contain spaces: "$(BUILD_FINGERPRINT)")
 endif
 
+# The string used to uniquely identify the system build; used by the OTA server.
+# This purposefully excludes any product-specific variables.
+ifeq (,$(strip $(BUILD_THUMBPRINT)))
+  BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
+endif
+ifneq ($(words $(BUILD_THUMBPRINT)),1)
+  $(error BUILD_THUMBPRINT cannot contain spaces: "$(BUILD_THUMBPRINT)")
+endif
+
+KNOWN_OEM_THUMBPRINT_PROPERTIES := \
+    ro.product.brand \
+    ro.product.name \
+    ro.product.device
+OEM_THUMBPRINT_PROPERTIES := $(filter $(KNOWN_OEM_THUMBPRINT_PROPERTIES),\
+    $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OEM_PROPERTIES))
+
 # Display parameters shown under Settings -> About Phone
 ifeq ($(TARGET_BUILD_VARIANT),user)
   # User builds should show:
@@ -172,10 +188,17 @@
 else
 system_prop_file := $(wildcard $(TARGET_DEVICE_DIR)/system.prop)
 endif
-
 $(INSTALLED_BUILD_PROP_TARGET): $(BUILDINFO_SH) $(INTERNAL_BUILD_ID_MAKEFILE) $(BUILD_SYSTEM)/version_defaults.mk $(system_prop_file)
 	@echo Target buildinfo: $@
 	@mkdir -p $(dir $@)
+	$(hide) echo > $@
+ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OEM_PROPERTIES),)
+	$(hide) echo "#" >> $@; \
+	        echo "# PRODUCT_OEM_PROPERTIES" >> $@; \
+	        echo "#" >> $@;
+	$(hide) $(foreach prop,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OEM_PROPERTIES), \
+		echo "import /oem/oem.prop $(prop)" >> $@;)
+endif
 	$(hide) TARGET_BUILD_TYPE="$(TARGET_BUILD_VARIANT)" \
 			TARGET_DEVICE="$(TARGET_DEVICE)" \
 			PRODUCT_NAME="$(TARGET_PRODUCT)" \
@@ -192,9 +215,11 @@
 			PLATFORM_VERSION="$(PLATFORM_VERSION)" \
 			PLATFORM_SDK_VERSION="$(PLATFORM_SDK_VERSION)" \
 			PLATFORM_VERSION_CODENAME="$(PLATFORM_VERSION_CODENAME)" \
+			PLATFORM_VERSION_ALL_CODENAMES="$(PLATFORM_VERSION_ALL_CODENAMES)" \
 			BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
 			TARGET_BOOTLOADER_BOARD_NAME="$(TARGET_BOOTLOADER_BOARD_NAME)" \
 			BUILD_FINGERPRINT="$(BUILD_FINGERPRINT)" \
+			$(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT)") \
 			TARGET_BOARD_PLATFORM="$(TARGET_BOARD_PLATFORM)" \
 			TARGET_CPU_ABI_LIST="$(TARGET_CPU_ABI_LIST)" \
 			TARGET_CPU_ABI_LIST_32_BIT="$(TARGET_CPU_ABI_LIST_32_BIT)" \
@@ -202,7 +227,7 @@
 			TARGET_CPU_ABI="$(TARGET_CPU_ABI)" \
 			TARGET_CPU_ABI2="$(TARGET_CPU_ABI2)" \
 			TARGET_AAPT_CHARACTERISTICS="$(TARGET_AAPT_CHARACTERISTICS)" \
-	        bash $(BUILDINFO_SH) > $@
+	        bash $(BUILDINFO_SH) >> $@
 	$(hide) $(foreach file,$(system_prop_file), \
 		if [ -f "$(file)" ]; then \
 			echo "#" >> $@; \
@@ -218,11 +243,27 @@
 		        echo "#" >> $@; )
 	$(hide) $(foreach line,$(ADDITIONAL_BUILD_PROPERTIES), \
 		echo "$(line)" >> $@;)
-	$(hide) build/tools/post_process_props.py $@
+	$(hide) build/tools/post_process_props.py $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_PROPERTY_BLACKLIST)
 
 build_desc :=
 
 # -----------------------------------------------------------------
+# vendor build.prop
+#
+# For verifying that the vendor build is what we think it is
+ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+INSTALLED_VENDOR_BUILD_PROP_TARGET := $(TARGET_OUT_VENDOR)/build.prop
+ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_VENDOR_BUILD_PROP_TARGET)
+$(INSTALLED_VENDOR_BUILD_PROP_TARGET): $(INSTALLED_BUILD_PROP_TARGET)
+	@echo Target vendor buildinfo: $@
+	@mkdir -p $(dir $@)
+	$(hide) echo > $@
+	$(hide) echo ro.vendor.build.date=`date`>>$@
+	$(hide) echo ro.vendor.build.date.utc=`date +%s`>>$@
+	$(hide) echo ro.vendor.build.fingerprint="$(BUILD_FINGERPRINT)">>$@
+endif
+
+# -----------------------------------------------------------------
 # sdk-build.prop
 #
 # There are certain things in build.prop that we don't want to
@@ -459,18 +500,33 @@
 	@echo "make $@: ignoring dependencies"
 	$(hide) $(MKEXT2BOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
 
-else # TARGET_BOOTIMAGE_USE_EXT2 != true
+else ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY)) # TARGET_BOOTIMAGE_USE_EXT2 != true
+
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER)
+	$(call pretty,"Target boot image: $@")
+	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
+	$(BOOT_SIGNER) /boot $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY) $@
+	$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+
+.PHONY: bootimage-nodeps
+bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER)
+	@echo "make $@: ignoring dependencies"
+	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
+	$(BOOT_SIGNER) /boot $(INSTALLED_BOOTIMAGE_TARGET) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY) $(INSTALLED_BOOTIMAGE_TARGET)
+	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+
+else # PRODUCT_SUPPORTS_VERITY != true
 
 $(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES)
 	$(call pretty,"Target boot image: $@")
 	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
-	$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE),raw)
+	$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 
 .PHONY: bootimage-nodeps
 bootimage-nodeps: $(MKBOOTIMG)
 	@echo "make $@: ignoring dependencies"
 	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
-	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE),raw)
+	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 
 endif # TARGET_BOOTIMAGE_USE_EXT2
 
@@ -618,17 +674,33 @@
 endif
 endif
 
+# These options tell the recovery updater/installer how to mount the partitions writable.
+# <fstype>=<fstype_opts>[|<fstype_opts>]...
+# fstype_opts := <opt>[,<opt>]...
+#         opt := <name>[=<value>]
+# The following worked on Nexus devices with kernels 3.1, 3.4, and 3.10.
+DEFAULT_TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS := ext4=max_batch_time=0,commit=1,data=ordered,barrier=1,errors=panic,nodelalloc
+
 ifneq (true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED))
   INTERNAL_USERIMAGES_SPARSE_EXT_FLAG := -s
 endif
 
 ifeq ($(INTERNAL_USERIMAGES_USE_EXT),true)
-INTERNAL_USERIMAGES_DEPS := $(MKEXTUSERIMG) $(MAKE_EXT4FS) $(SIMG2IMG) $(E2FSCK)
+INTERNAL_USERIMAGES_DEPS := $(SIMG2IMG)
+INTERNAL_USERIMAGES_DEPS += $(MKEXTUSERIMG) $(MAKE_EXT4FS) $(E2FSCK)
+ifeq ($(TARGET_USERIMAGES_USE_F2FS),true)
+INTERNAL_USERIMAGES_DEPS += $(MKF2FSUSERIMG) $(MAKE_F2FS)
+endif
 else
 INTERNAL_USERIMAGES_DEPS := $(MKYAFFS2)
 endif
+
 INTERNAL_USERIMAGES_BINARY_PATHS := $(sort $(dir $(INTERNAL_USERIMAGES_DEPS)))
 
+ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY))
+INTERNAL_USERIMAGES_DEPS += $(BUILD_VERITY_TREE) $(APPEND2SIMG) $(VERITY_SIGNER)
+endif
+
 SELINUX_FC := $(TARGET_ROOT_OUT)/file_contexts
 INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
 
@@ -637,14 +709,21 @@
 define generate-userimage-prop-dictionary
 $(if $(INTERNAL_USERIMAGES_EXT_VARIANT),$(hide) echo "fs_type=$(INTERNAL_USERIMAGES_EXT_VARIANT)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_PARTITION_SIZE),$(hide) echo "system_size=$(BOARD_SYSTEMIMAGE_PARTITION_SIZE)" >> $(1))
+$(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
 $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "cache_fs_type=$(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
 $(if $(BOARD_CACHEIMAGE_PARTITION_SIZE),$(hide) echo "cache_size=$(BOARD_CACHEIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "vendor_fs_type=$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
 $(if $(BOARD_VENDORIMAGE_PARTITION_SIZE),$(hide) echo "vendor_size=$(BOARD_VENDORIMAGE_PARTITION_SIZE)" >> $(1))
+$(if $(BOARD_OEMIMAGE_PARTITION_SIZE),$(hide) echo "oem_size=$(BOARD_OEMIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG),$(hide) echo "extfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG)" >> $(1))
 $(if $(mkyaffs2_extra_flags),$(hide) echo "mkyaffs2_extra_flags=$(mkyaffs2_extra_flags)" >> $(1))
 $(hide) echo "selinux_fc=$(SELINUX_FC)" >> $(1)
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_key=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_signer_cmd=$(VERITY_SIGNER)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION),$(hide) echo "system_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION),$(hide) echo "vendor_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION)" >> $(1))
 $(if $(2),$(hide) $(foreach kv,$(2),echo "$(kv)" >> $(1);))
 endef
 
@@ -664,11 +743,32 @@
 recovery_binary := $(call intermediates-dir-for,EXECUTABLES,recovery)/recovery
 recovery_resources_common := $(call include-path-for, recovery)/res
 
-# Select the 18x32 font on high-density devices; and the 12x22 font on
-# other devices.  Note that the font selected here can be overridden
-# for a particular device by putting a font.png in its private
-# recovery resources.
-ifneq (,$(filter xxhdpi xhdpi,$(subst $(comma),$(space),$(PRODUCT_AAPT_CONFIG))))
+# Set recovery_density to the density bucket of the device.
+recovery_density := unknown
+ifneq (,$(PRODUCT_AAPT_PREF_CONFIG))
+# If PRODUCT_AAPT_PREF_CONFIG includes a dpi bucket, then use that value.
+recovery_density := $(filter %dpi,$(PRODUCT_AAPT_PREF_CONFIG))
+else
+# Otherwise, use the highest density that appears in PRODUCT_AAPT_CONFIG.
+# Order is important here; we'll take the first one that's found.
+recovery_densities := $(filter $(PRODUCT_AAPT_CONFIG_SP),xxxhdpi xxhdpi xhdpi hdpi tvdpi mdpi ldpi)
+ifneq (,$(recovery_densities))
+recovery_density := $(word 1,$(recovery_densities))
+endif
+endif
+
+ifneq (,$(wildcard $(recovery_resources_common)-$(recovery_density)))
+recovery_resources_common := $(recovery_resources_common)-$(recovery_density)
+else
+recovery_resources_common := $(recovery_resources_common)-xhdpi
+endif
+
+# Select the 18x32 font on high-density devices (xhdpi and up); and
+# the 12x22 font on other devices.  Note that the font selected here
+# can be overridden for a particular device by putting a font.png in
+# its private recovery resources.
+
+ifneq (,$(filter xxxhdpi xxhdpi xhdpi,$(recovery_density)))
 recovery_font := $(call include-path-for, recovery)/fonts/18x32.png
 else
 recovery_font := $(call include-path-for, recovery)/fonts/12x22.png
@@ -750,7 +850,9 @@
 	$(hide) cp -f $(recovery_sepolicy) $(TARGET_RECOVERY_ROOT_OUT)/sepolicy
 	$(hide) -cp $(TARGET_ROOT_OUT)/init.recovery.*.rc $(TARGET_RECOVERY_ROOT_OUT)/
 	$(hide) cp -f $(recovery_binary) $(TARGET_RECOVERY_ROOT_OUT)/sbin/
-	$(hide) cp -rf $(recovery_resources_common) $(TARGET_RECOVERY_ROOT_OUT)/
+	$(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/res
+	$(hide) rm -rf $(TARGET_RECOVERY_ROOT_OUT)/res/*
+	$(hide) cp -rf $(recovery_resources_common)/* $(TARGET_RECOVERY_ROOT_OUT)/res
 	$(hide) cp -f $(recovery_font) $(TARGET_RECOVERY_ROOT_OUT)/res/images/font.png
 	$(hide) $(foreach item,$(recovery_resources_private), \
 	  cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/)
@@ -761,7 +863,10 @@
 	        > $(TARGET_RECOVERY_ROOT_OUT)/default.prop
 	$(hide) $(MKBOOTFS) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk)
 	$(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
-	$(hide) $(call assert-max-image-size,$@,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE),raw)
+ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY))
+	$(BOOT_SIGNER) /recovery $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY) $@
+endif
+	$(hide) $(call assert-max-image-size,$@,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))
 	@echo ----- Made recovery image: $@ --------
 
 $(RECOVERY_RESOURCE_ZIP): $(INSTALLED_RECOVERYIMAGE_TARGET)
@@ -828,14 +933,36 @@
     $(call intermediates-dir-for,PACKAGING,systemimage)
 BUILT_SYSTEMIMAGE := $(systemimage_intermediates)/system.img
 
+# Create symlink /system/vendor to /vendor if necessary.
+ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+define create-system-vendor-symlink
+$(hide) if [ -d $(TARGET_OUT)/vendor ] && [ ! -h $(TARGET_OUT)/vendor ]; then \
+  echo 'Non-symlink $(TARGET_OUT)/vendor detected!' 1>&2; \
+  echo 'You cannot install files to $(TARGET_OUT)/vendor while building a separate vendor.img!' 1>&2; \
+  exit 1; \
+fi
+$(hide) ln -sf /vendor $(TARGET_OUT)/vendor
+endef
+else
+define create-system-vendor-symlink
+endef
+endif
+
 # $(1): output file
 define build-systemimage-target
   @echo "Target system fs image: $(1)"
+  $(call create-system-vendor-symlink)
   @mkdir -p $(dir $(1)) $(systemimage_intermediates) && rm -rf $(systemimage_intermediates)/system_image_info.txt
-  $(call generate-userimage-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt, skip_fsck=true)
+  $(call generate-userimage-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt, \
+      skip_fsck=true)
   $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
       ./build/tools/releasetools/build_image.py \
-      $(TARGET_OUT) $(systemimage_intermediates)/system_image_info.txt $(1)
+      $(TARGET_OUT) $(systemimage_intermediates)/system_image_info.txt $(1) \
+      || ( echo "Out of space? the tree size of $(TARGET_OUT) is (MB): " 1>&2 ;\
+           du -sm $(TARGET_OUT) 1>&2;\
+           echo "The max is $$(( $(BOARD_SYSTEMIMAGE_PARTITION_SIZE) / 1048576 )) MB." 1>&2 ;\
+           mkdir -p $(DIST_DIR); cp $(INSTALLED_FILES_FILE) $(DIST_DIR)/installed-files-rescued.txt; \
+           exit 1 )
 endef
 
 $(BUILT_SYSTEMIMAGE): $(FULL_SYSTEMIMAGE_DEPS) $(INSTALLED_FILES_FILE)
@@ -865,7 +992,7 @@
 $(INSTALLED_SYSTEMIMAGE): $(BUILT_SYSTEMIMAGE) $(RECOVERY_FROM_BOOT_PATCH) | $(ACP)
 	@echo "Install system fs image: $@"
 	$(copy-file-to-target)
-	$(hide) $(call assert-max-image-size,$@ $(RECOVERY_FROM_BOOT_PATCH),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE),yaffs)
+	$(hide) $(call assert-max-image-size,$@ $(RECOVERY_FROM_BOOT_PATCH),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
 
 systemimage: $(INSTALLED_SYSTEMIMAGE)
 
@@ -874,7 +1001,7 @@
 	            | $(INTERNAL_USERIMAGES_DEPS)
 	@echo "make $@: ignoring dependencies"
 	$(call build-systemimage-target,$(INSTALLED_SYSTEMIMAGE))
-	$(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMIMAGE),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE),yaffs)
+	$(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMIMAGE),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
 
 ifneq (,$(filter systemimage-nodeps snod, $(MAKECMDGOALS)))
 ifeq (true,$(WITH_DEXPREOPT))
@@ -885,10 +1012,11 @@
 #######
 ## system tarball
 define build-systemtarball-target
-    $(call pretty,"Target system fs tarball: $(INSTALLED_SYSTEMTARBALL_TARGET)")
-    $(MKTARBALL) $(FS_GET_STATS) \
-		$(PRODUCT_OUT) system $(PRIVATE_SYSTEM_TAR) \
-		$(INSTALLED_SYSTEMTARBALL_TARGET)
+  $(call pretty,"Target system fs tarball: $(INSTALLED_SYSTEMTARBALL_TARGET)")
+  $(call create-system-vendor-symlink)
+  $(MKTARBALL) $(FS_GET_STATS) \
+    $(PRODUCT_OUT) system $(PRIVATE_SYSTEM_TAR) \
+    $(INSTALLED_SYSTEMTARBALL_TARGET)
 endef
 
 ifndef SYSTEM_TARBALL_FORMAT
@@ -925,6 +1053,10 @@
 		$(TARGET_COPY_OUT_SYSTEM) \
 		$(patsubst $(PRODUCT_OUT)/%, %, $(TARGET_OUT_NOTICE_FILES)) \
 		$(addprefix symbols/,$(PDK_SYMBOL_FILES_LIST))
+ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+	$(hide) cd $(dir $@) && zip -qry $(notdir $@) \
+		$(TARGET_COPY_OUT_VENDOR)
+endif
 ifneq ($(PDK_PLATFORM_JAVA_ZIP_CONTENTS),)
 	$(hide) cd $(OUT_DIR) && zip -qry $(patsubst $(OUT_DIR)/%,%,$@) $(PDK_PLATFORM_JAVA_ZIP_CONTENTS)
 endif
@@ -997,7 +1129,7 @@
   $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
       ./build/tools/releasetools/build_image.py \
       $(TARGET_OUT_DATA) $(userdataimage_intermediates)/userdata_image_info.txt $(INSTALLED_USERDATAIMAGE_TARGET)
-  $(hide) $(call assert-max-image-size,$(INSTALLED_USERDATAIMAGE_TARGET),$(BOARD_USERDATAIMAGE_PARTITION_SIZE),yaffs)
+  $(hide) $(call assert-max-image-size,$(INSTALLED_USERDATAIMAGE_TARGET),$(BOARD_USERDATAIMAGE_PARTITION_SIZE))
 endef
 
 # We just build this directly to the install location.
@@ -1052,7 +1184,7 @@
   $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
       ./build/tools/releasetools/build_image.py \
       $(TARGET_OUT_CACHE) $(cacheimage_intermediates)/cache_image_info.txt $(INSTALLED_CACHEIMAGE_TARGET)
-  $(hide) $(call assert-max-image-size,$(INSTALLED_CACHEIMAGE_TARGET),$(BOARD_CACHEIMAGE_PARTITION_SIZE),yaffs)
+  $(hide) $(call assert-max-image-size,$(INSTALLED_CACHEIMAGE_TARGET),$(BOARD_CACHEIMAGE_PARTITION_SIZE))
 endef
 
 # We just build this directly to the install location.
@@ -1071,7 +1203,12 @@
 # vendor partition image
 ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
 INTERNAL_VENDORIMAGE_FILES := \
-    $(filter $(TARGET_OUT_VENDOR)/%,$(ALL_DEFAULT_INSTALLED_MODULES))
+    $(filter $(TARGET_OUT_VENDOR)/%,\
+      $(ALL_DEFAULT_INSTALLED_MODULES)\
+      $(ALL_PDK_FUSION_FILES))
+
+# platform.zip depends on $(INTERNAL_VENDORIMAGE_FILES).
+$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_VENDORIMAGE_FILES)
 
 vendorimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,vendor)
@@ -1085,7 +1222,7 @@
   $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
       ./build/tools/releasetools/build_image.py \
       $(TARGET_OUT_VENDOR) $(vendorimage_intermediates)/vendor_image_info.txt $(INSTALLED_VENDORIMAGE_TARGET)
-  $(hide) $(call assert-max-image-size,$(INSTALLED_VENDORIMAGE_TARGET),$(BOARD_VENDORIMAGE_PARTITION_SIZE),yaffs)
+  $(hide) $(call assert-max-image-size,$(INSTALLED_VENDORIMAGE_TARGET),$(BOARD_VENDORIMAGE_PARTITION_SIZE))
 endef
 
 # We just build this directly to the install location.
@@ -1100,12 +1237,6 @@
 endif # BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
 
 # -----------------------------------------------------------------
-# bring in the installer image generation defines if necessary
-ifeq ($(TARGET_USE_DISKINSTALLER),true)
-include bootable/diskinstaller/config.mk
-endif
-
-# -----------------------------------------------------------------
 # host tools needed to build dist and OTA packages
 
 DISTTOOLS :=  $(HOST_OUT_EXECUTABLES)/minigzip \
@@ -1121,7 +1252,11 @@
 	  $(HOST_OUT_EXECUTABLES)/mkuserimg.sh \
 	  $(HOST_OUT_EXECUTABLES)/make_ext4fs \
 	  $(HOST_OUT_EXECUTABLES)/simg2img \
-	  $(HOST_OUT_EXECUTABLES)/e2fsck
+	  $(HOST_OUT_EXECUTABLES)/e2fsck \
+	  $(HOST_OUT_EXECUTABLES)/build_verity_tree \
+	  $(HOST_OUT_EXECUTABLES)/verity_signer \
+	  $(HOST_OUT_EXECUTABLES)/append2simg \
+	  $(HOST_OUT_EXECUTABLES)/boot_signer
 
 OTATOOLS := $(DISTTOOLS) \
 	  $(HOST_OUT_EXECUTABLES)/aapt
@@ -1268,6 +1403,12 @@
 ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE
 	$(hide) echo "recovery_size=$(BOARD_RECOVERYIMAGE_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
 endif
+ifdef TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS
+	@# TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS can be empty to indicate that nothing but defaults should be used.
+	$(hide) echo "recovery_mount_options=$(TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS)" >> $(zip_root)/META/misc_info.txt
+else
+	$(hide) echo "recovery_mount_options=$(DEFAULT_TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS)" >> $(zip_root)/META/misc_info.txt
+endif
 	$(hide) echo "tool_extensions=$(tool_extensions)" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "default_system_dev_certificate=$(DEFAULT_SYSTEM_DEV_CERTIFICATE)" >> $(zip_root)/META/misc_info.txt
 ifdef PRODUCT_EXTRA_RECOVERY_KEYS
@@ -1277,14 +1418,22 @@
 	$(hide) echo "use_set_metadata=1" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "multistage_support=1" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "update_rename_support=1" >> $(zip_root)/META/misc_info.txt
+ifneq ($(OEM_THUMBPRINT_PROPERTIES),)
+	# OTA scripts are only interested in fingerprint related properties
+	$(hide) echo "oem_fingerprint_properties=$(OEM_THUMBPRINT_PROPERTIES)" >> $(zip_root)/META/misc_info.txt
+endif
 	$(call generate-userimage-prop-dictionary, $(zip_root)/META/misc_info.txt)
+	$(hide) ./build/tools/releasetools/make_recovery_patch $(zip_root) $(zip_root)
 	@# Zip everything up, preserving symlinks
 	$(hide) (cd $(zip_root) && zip -qry ../$(notdir $@) .)
-	@# Run fs_config on all the system, boot ramdisk, and recovery ramdisk files in the zip, and save the output
+	@# Run fs_config on all the system, vendor, boot ramdisk,
+	@# and recovery ramdisk files in the zip, and save the output
 	$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="SYSTEM/" } /^SYSTEM\// {print "system/" $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -S $(SELINUX_FC) > $(zip_root)/META/filesystem_config.txt
+	$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="VENDOR/" } /^VENDOR\// {print "vendor/" $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -S $(SELINUX_FC) > $(zip_root)/META/vendor_filesystem_config.txt
 	$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="BOOT/RAMDISK/" } /^BOOT\/RAMDISK\// {print $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -S $(SELINUX_FC) > $(zip_root)/META/boot_filesystem_config.txt
 	$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="RECOVERY/RAMDISK/" } /^RECOVERY\/RAMDISK\// {print $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -S $(SELINUX_FC) > $(zip_root)/META/recovery_filesystem_config.txt
 	$(hide) (cd $(zip_root) && zip -q ../$(notdir $@) META/*filesystem_config.txt)
+	$(hide) ./build/tools/releasetools/add_img_to_target_files -p $(HOST_OUT) $@
 
 .PHONY: target-files-package
 target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
@@ -1315,8 +1464,10 @@
 	@echo "Package OTA: $@"
 	$(hide) MKBOOTIMG=$(MKBOOTIMG) \
 	   ./build/tools/releasetools/ota_from_target_files -v \
+	   --block \
 	   -p $(HOST_OUT) \
 	   -k $(KEY_CERT_PAIR) \
+	   $(if $(OEM_OTA_CONFIG), -o $(OEM_OTA_CONFIG)) \
 	   $(BUILT_TARGET_FILES_PACKAGE) $@
 
 .PHONY: otapackage
@@ -1338,25 +1489,16 @@
 
 INTERNAL_UPDATE_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
 
-ifeq ($(TARGET_RELEASETOOLS_EXTENSIONS),)
-# default to common dir for device vendor
-$(INTERNAL_UPDATE_PACKAGE_TARGET): extensions := $(TARGET_DEVICE_DIR)/../common
-else
-$(INTERNAL_UPDATE_PACKAGE_TARGET): extensions := $(TARGET_RELEASETOOLS_EXTENSIONS)
-endif
-
 $(INTERNAL_UPDATE_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(DISTTOOLS)
 	@echo "Package: $@"
 	$(hide) MKBOOTIMG=$(MKBOOTIMG) \
 	   ./build/tools/releasetools/img_from_target_files -v \
-	   -s $(extensions) \
 	   -p $(HOST_OUT) \
 	   $(BUILT_TARGET_FILES_PACKAGE) $@
 
 .PHONY: updatepackage
 updatepackage: $(INTERNAL_UPDATE_PACKAGE_TARGET)
 
-
 # -----------------------------------------------------------------
 # A zip of the symbols directory.  Keep the full paths to make it
 # more obvious where these files came from.
@@ -1368,10 +1510,14 @@
 name := $(name)-symbols-$(FILE_NAME_TAG)
 
 SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name).zip
+# For apps_only build we'll establish the dependency later in build/core/main.mk.
+ifndef TARGET_BUILD_APPS
 $(SYMBOLS_ZIP): $(INSTALLED_SYSTEMIMAGE) $(INSTALLED_BOOTIMAGE_TARGET)
+endif
+$(SYMBOLS_ZIP):
 	@echo "Package symbols: $@"
 	$(hide) rm -rf $@
-	$(hide) mkdir -p $(dir $@)
+	$(hide) mkdir -p $(dir $@) $(TARGET_OUT_UNSTRIPPED)
 	$(hide) zip -qr $@ $(TARGET_OUT_UNSTRIPPED)
 
 # -----------------------------------------------------------------
@@ -1389,7 +1535,7 @@
 	@echo "Package apps: $@"
 	$(hide) rm -rf $@
 	$(hide) mkdir -p $(dir $@)
-	$(hide) zip -qj $@ $(TARGET_OUT_APPS)/* $(TARGET_OUT_APPS_PRIVILEGED)/*
+	$(hide) zip -qj $@ $(TARGET_OUT_APPS)/*/*.apk $(TARGET_OUT_APPS_PRIVILEGED)/*/*.apk
 
 
 #------------------------------------------------------------------
@@ -1401,7 +1547,7 @@
 # the dependency will be set up later in build/core/main.mk.
 $(EMMA_META_ZIP) :
 	@echo "Collecting Emma coverage meta files."
-	$(hide) find $(TARGET_COMMON_OUT_ROOT) -name "coverage.em" | \
+	$(hide) find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "coverage.em" | \
 		zip -@ -q $@
 
 endif # EMMA_INSTRUMENT=true
@@ -1417,7 +1563,8 @@
 	@echo "Packaging Proguard obfuscation dictionary files."
 	$(hide) dict_files=`find $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS -name proguard_dictionary`; \
 		if [ -n "$$dict_files" ]; then \
-		  zip -q $@ $$dict_files; \
+		  unobfuscated_jars=$${dict_files//proguard_dictionary/classes.jar}; \
+		  zip -q $@ $$dict_files $$unobfuscated_jars; \
 		else \
 		  touch $(dir $@)/dummy; \
 		  (cd $(dir $@) && zip -q $(notdir $@) dummy); \
@@ -1493,7 +1640,6 @@
 ATREE_FILES := \
 	$(ALL_PREBUILT) \
 	$(ALL_COPIED_HEADERS) \
-	$(ALL_GENERATED_SOURCES) \
 	$(ALL_DEFAULT_INSTALLED_MODULES) \
 	$(INSTALLED_RAMDISK_TARGET) \
 	$(ALL_DOCS) \
@@ -1521,6 +1667,8 @@
 sdk_atree_files += $(atree_dir)/sdk.atree
 endif
 
+include $(BUILD_SYSTEM)/sdk_font.mk
+
 deps := \
 	$(target_notice_file_txt) \
 	$(tools_notice_file_txt) \
@@ -1534,7 +1682,8 @@
 	$(ATREE_FILES) \
 	$(sdk_atree_files) \
 	$(HOST_OUT_EXECUTABLES)/atree \
-	$(HOST_OUT_EXECUTABLES)/line_endings
+	$(HOST_OUT_EXECUTABLES)/line_endings \
+	$(SDK_FONT_DEPS)
 
 INTERNAL_SDK_TARGET := $(sdk_dir)/$(sdk_name).zip
 $(INTERNAL_SDK_TARGET): PRIVATE_NAME := $(sdk_name)
@@ -1542,10 +1691,6 @@
 $(INTERNAL_SDK_TARGET): PRIVATE_DEP_FILE := $(sdk_dep_file)
 $(INTERNAL_SDK_TARGET): PRIVATE_INPUT_FILES := $(sdk_atree_files)
 
-sdk_font_temp_dir := $(call intermediates-dir-for,PACKAGING,sdk-fonts)
-sdk_font_input_list := frameworks/base/data/fonts external/noto-fonts
-sdk_font_rename_script := frameworks/base/tools/layoutlib/rename_font/build_font.py
-
 # Set SDK_GNU_ERROR to non-empty to fail when a GNU target is built.
 #
 #SDK_GNU_ERROR := true
@@ -1561,9 +1706,7 @@
 	  fi; \
 	done; \
 	if [ $$FAIL ]; then exit 1; fi
-	$(hide) mkdir -p $(sdk_font_temp_dir)
-	$(hide) PYTHONPATH=$$PYTHONPATH:external/fonttools/Lib $(sdk_font_rename_script) $(sdk_font_input_list) \
-	        $(sdk_font_temp_dir)
+	$(hide) echo $(notdir $(SDK_FONT_DEPS)) | tr " " "\n"  > $(SDK_FONT_TEMP)/fontsInSdk.txt
 	$(hide) ( \
 		ATREE_STRIP="strip -x" \
 		$(HOST_OUT_EXECUTABLES)/atree \
@@ -1579,7 +1722,7 @@
 			-v "TARGET_ARCH=$(TARGET_ARCH)" \
 			-v "TARGET_CPU_ABI=$(TARGET_CPU_ABI)" \
 			-v "DLL_EXTENSION=$(HOST_SHLIB_SUFFIX)" \
-			-v "FONT_OUT=$(sdk_font_temp_dir)" \
+			-v "FONT_OUT=$(SDK_FONT_TEMP)" \
 			-o $(PRIVATE_DIR) && \
 		cp -f $(target_notice_file_txt) \
 				$(PRIVATE_DIR)/system-images/android-$(PLATFORM_VERSION)/$(TARGET_CPU_ABI)/NOTICE.txt && \
@@ -1606,11 +1749,11 @@
 INTERNAL_FINDBUGS_HTML_TARGET := $(PRODUCT_OUT)/findbugs.html
 $(INTERNAL_FINDBUGS_XML_TARGET): $(ALL_FINDBUGS_FILES)
 	@echo UnionBugs: $@
-	$(hide) prebuilt/common/findbugs/bin/unionBugs $(ALL_FINDBUGS_FILES) \
+	$(hide) $(FINDBUGS_DIR)/unionBugs $(ALL_FINDBUGS_FILES) \
 	> $@
 $(INTERNAL_FINDBUGS_HTML_TARGET): $(INTERNAL_FINDBUGS_XML_TARGET)
 	@echo ConvertXmlToText: $@
-	$(hide) prebuilt/common/findbugs/bin/convertXmlToText -html:fancy.xsl \
+	$(hide) $(FINDBUGS_DIR)/convertXmlToText -html:fancy.xsl \
 	$(INTERNAL_FINDBUGS_XML_TARGET) > $@
 
 # -----------------------------------------------------------------
@@ -1622,6 +1765,9 @@
 include $(sort $(wildcard $(BUILD_SYSTEM)/tasks/*.mk))
 -include $(sort $(wildcard vendor/*/build/tasks/*.mk))
 -include $(sort $(wildcard device/*/build/tasks/*.mk))
+# Also the project-specific tasks
+-include $(sort $(wildcard vendor/*/*/build/tasks/*.mk))
+-include $(sort $(wildcard device/*/*/build/tasks/*.mk))
 endif
 
 # -----------------------------------------------------------------
diff --git a/core/apicheck_msg_current.txt b/core/apicheck_msg_current.txt
index 440e7f8..9abd381 100644
--- a/core/apicheck_msg_current.txt
+++ b/core/apicheck_msg_current.txt
@@ -7,7 +7,7 @@
       errors above.
 
    2) You can update current.txt by executing the following command:
-         make update-api
+         make %UPDATE_API%
 
       To submit the revised current.txt to the main Android repository,
       you will need approval.
diff --git a/core/base_rules.mk b/core/base_rules.mk
index e840047..8c25897 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -114,6 +114,8 @@
   else
   ifeq (true,$(LOCAL_PROPRIETARY_MODULE))
     partition_tag := _VENDOR
+  else ifeq (true,$(LOCAL_OEM_MODULE))
+    partition_tag := _OEM
   else
     # The definition of should-install-to-system will be different depending
     # on which goal (e.g., sdk or just droid) is being built.
@@ -174,9 +176,15 @@
   built_module_path := $(intermediates)
 endif
 LOCAL_BUILT_MODULE := $(built_module_path)/$(my_built_module_stem)
-built_module_path :=
 
 ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
+  # An apk and its attachments reside in their own subdir.
+  ifeq ($(LOCAL_MODULE_CLASS),APPS)
+  # framework-res.apk doesn't like the additional layer.
+  ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+    my_module_path := $(my_module_path)/$(LOCAL_MODULE)
+  endif
+  endif
   LOCAL_INSTALLED_MODULE := $(my_module_path)/$(my_installed_module_stem)
 endif
 
@@ -198,12 +206,12 @@
 aidl_preprocess_import :=
 LOCAL_SDK_VERSION:=$(strip $(LOCAL_SDK_VERSION))
 ifdef LOCAL_SDK_VERSION
-ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
+ifneq ($(filter current system_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS)),)
   # LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS
   aidl_preprocess_import := $(TARGET_OUT_COMMON_INTERMEDIATES)/framework.aidl
 else
   aidl_preprocess_import := $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_VERSION)/framework.aidl
-endif # !current
+endif # not current or system_current
 else
 # build against the platform.
 LOCAL_AIDL_INCLUDES += $(FRAMEWORKS_BASE_JAVA_SRC_DIRS)
@@ -230,7 +238,7 @@
 
 # Emit a java source file with constants for the tags, if
 # LOCAL_MODULE_CLASS is "APPS" or "JAVA_LIBRARIES".
-ifneq ($(strip $(filter $(LOCAL_MODULE_CLASS),APPS JAVA_LIBRARIES)),)
+ifneq ($(filter $(LOCAL_MODULE_CLASS),APPS JAVA_LIBRARIES),)
 
 logtags_java_sources := $(patsubst %.logtags,%.java,$(addprefix $(intermediates.COMMON)/src/, $(logtags_sources)))
 logtags_sources := $(addprefix $(TOP_DIR)$(LOCAL_PATH)/, $(logtags_sources))
@@ -386,13 +394,14 @@
 ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
 # LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS.
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(call java-lib-files,android_stubs_current)
+else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(call java-lib-files,android_system_stubs_current)
 else
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(call java-lib-files,sdk_v$(LOCAL_SDK_VERSION))
-endif # current
+endif # current or system_current
 endif # LOCAL_SDK_VERSION
 endif # TARGET_
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_EXTRA_JAR_ARGS := $(extra_jar_args)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_STATIC_JAVA_LIBRARIES := $(full_static_java_libs)
 
@@ -423,10 +432,10 @@
 
 # This is set by packages that are linking to other packages that export
 # shared libraries, allowing them to make use of the code in the linked apk.
-LOCAL_APK_LIBRARIES := $(strip $(LOCAL_APK_LIBRARIES))
-ifdef LOCAL_APK_LIBRARIES
+apk_libraries := $(sort $(LOCAL_APK_LIBRARIES) $(LOCAL_RES_LIBRARIES))
+ifneq ($(apk_libraries),)
   link_apk_libraries := \
-      $(foreach lib,$(LOCAL_APK_LIBRARIES), \
+      $(foreach lib,$(apk_libraries), \
         $(call intermediates-dir-for, \
               APPS,$(lib),,COMMON)/classes.jar)
 
@@ -454,6 +463,10 @@
   full_java_lib_deps += $(link_instr_classes_jar)
 endif
 
+endif  # need_compile_java
+
+# We may want to add jar manifest or jar resource files even if there is no java code at all.
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_EXTRA_JAR_ARGS := $(extra_jar_args)
 jar_manifest_file :=
 ifneq ($(strip $(LOCAL_JAR_MANIFEST)),)
 jar_manifest_file := $(LOCAL_PATH)/$(LOCAL_JAR_MANIFEST)
@@ -462,9 +475,6 @@
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAR_MANIFEST :=
 endif
 
-endif  # need_compile_java
-
-
 ###########################################################
 ## make clean- targets
 ###########################################################
@@ -598,7 +608,7 @@
 ALL_MODULES.$(my_register_name).INSTALLED := \
     $(strip $(ALL_MODULES.$(my_register_name).INSTALLED) $(LOCAL_INSTALLED_MODULE))
 ALL_MODULES.$(my_register_name).BUILT_INSTALLED := \
-    $(strip $(ALL_MODULES.$(my_register_name).BUILT_INSTALLED)$(LOCAL_BUILT_MODULE):$(LOCAL_INSTALLED_MODULE))
+    $(strip $(ALL_MODULES.$(my_register_name).BUILT_INSTALLED) $(LOCAL_BUILT_MODULE):$(LOCAL_INSTALLED_MODULE))
 endif
 ifdef LOCAL_PICKUP_FILES
 # Files or directories ready to pick up by the build system
@@ -622,6 +632,9 @@
 ifdef LOCAL_2ND_ARCH_VAR_PREFIX
 ALL_MODULES.$(my_register_name).FOR_2ND_ARCH := true
 endif
+ifdef aidl_sources
+ALL_MODULES.$(my_register_name).AIDL_FILES := $(aidl_sources)
+endif
 
 INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
 
diff --git a/core/binary.mk b/core/binary.mk
index 48b4081..d339317 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -113,6 +113,17 @@
 my_c_includes := $(LOCAL_C_INCLUDES)
 my_generated_sources := $(LOCAL_GENERATED_SOURCES)
 
+# MinGW spits out warnings about -fPIC (even for -fpie?!) being ignored because
+# all code is position independent, and then those warnings get promoted to
+# errors.
+ifeq ($(strip $(USE_MINGW)),)
+ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
+my_cflags += -fpie
+else
+my_cflags += -fPIC
+endif
+endif
+
 my_src_files += $(LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_SRC_FILES_$(my_32_64_bit_suffix))
 my_shared_libraries += $(LOCAL_SHARED_LIBRARIES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_SHARED_LIBRARIES_$(my_32_64_bit_suffix))
 my_cflags += $(LOCAL_CFLAGS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_CFLAGS_$(my_32_64_bit_suffix))
@@ -154,6 +165,8 @@
 
 ifeq ($(strip $(LOCAL_ADDRESS_SANITIZER)),true)
   my_clang := true
+  # Frame pointer based unwinder in ASan requires ARM frame setup.
+  LOCAL_ARM_MODE := arm
   my_cflags += $(ADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS)
   my_ldflags += $(ADDRESS_SANITIZER_CONFIG_EXTRA_LDFLAGS)
   my_shared_libraries += $(ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES)
@@ -174,11 +187,10 @@
 ####################################################
 ## Add FDO flags if FDO is turned on and supported
 ####################################################
-ifneq ($(strip $(LOCAL_FDO_SUPPORT)),)
+ifeq ($(strip $(LOCAL_FDO_SUPPORT)), true)
   ifeq ($(strip $(LOCAL_IS_HOST_MODULE)),)
     my_cflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_CFLAGS)
-    my_cppflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_CFLAGS)
-    my_ldflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_CFLAGS)
+    my_ldflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_LDFLAGS)
   endif
 endif
 
@@ -315,6 +327,7 @@
 endif
 endif
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CXX := $(my_cxx)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLANG := $(my_clang)
 
 # TODO: support a mix of standard extensions so that this isn't necessary
 LOCAL_CPP_EXTENSION := $(strip $(LOCAL_CPP_EXTENSION))
@@ -395,6 +408,8 @@
 # This can be disabled with LOCAL_RENDERSCRIPT_FLAGS := -Wno-error
 renderscript_flags := -Wall -Werror
 renderscript_flags += $(LOCAL_RENDERSCRIPT_FLAGS)
+# -m32 or -m64
+renderscript_flags += -m$(my_32_64_bit_suffix)
 
 renderscript_includes := \
     $(TOPDIR)external/clang/lib/Headers \
@@ -465,6 +480,9 @@
 proto_generated_objects := $(addprefix $(proto_generated_obj_dir)/, \
     $(patsubst %.proto,%.pb.o,$(proto_sources_fullpath)))
 
+# Auto-export the generated proto source dir.
+LOCAL_EXPORT_C_INCLUDE_DIRS += $(proto_generated_cc_sources_dir)
+
 # Ensure the transform-proto-to-cc rule is only defined once in multilib build.
 ifndef $(my_prefix)_$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_proto_defined
 $(proto_generated_cc_sources): PRIVATE_PROTO_INCLUDES := $(TOP)
@@ -945,7 +963,8 @@
 ###########################################################
 export_includes := $(intermediates)/export_includes
 $(export_includes): PRIVATE_EXPORT_C_INCLUDE_DIRS := $(LOCAL_EXPORT_C_INCLUDE_DIRS)
-$(export_includes) : $(LOCAL_MODULE_MAKEFILE)
+# Make sure .pb.h are already generated before any dependent source files get compiled.
+$(export_includes) : $(LOCAL_MODULE_MAKEFILE) $(proto_generated_headers)
 	@echo Export includes file: $< -- $@
 	$(hide) mkdir -p $(dir $@) && rm -f $@
 ifdef LOCAL_EXPORT_C_INCLUDE_DIRS
diff --git a/core/build_id.mk b/core/build_id.mk
index f94b224..00a691f 100644
--- a/core/build_id.mk
+++ b/core/build_id.mk
@@ -18,6 +18,4 @@
 # (like "CRB01").  It must be a single word, and is
 # capitalized by convention.
 
-BUILD_ID := AOSP
-
-DISPLAY_BUILD_NUMBER := true
+export BUILD_ID=LMP
diff --git a/core/clang/TARGET_arm.mk b/core/clang/TARGET_arm.mk
index 595fb56..59ed41f 100644
--- a/core/clang/TARGET_arm.mk
+++ b/core/clang/TARGET_arm.mk
@@ -53,3 +53,5 @@
   $(CLANG_CONFIG_arm_TARGET_EXTRA_LDFLAGS)
 
 $(clang_2nd_arch_prefix)RS_TRIPLE := armv7-none-linux-gnueabi
+$(clang_2nd_arch_prefix)RS_TRIPLE_CFLAGS :=
+RS_COMPAT_TRIPLE := armv7-none-linux-gnueabi
diff --git a/core/clang/TARGET_arm64.mk b/core/clang/TARGET_arm64.mk
index 98bbeb2..6f9e540 100644
--- a/core/clang/TARGET_arm64.mk
+++ b/core/clang/TARGET_arm64.mk
@@ -51,3 +51,5 @@
   $(CLANG_CONFIG_arm64_TARGET_EXTRA_LDFLAGS)
 
 RS_TRIPLE := aarch64-linux-android
+RS_TRIPLE_CFLAGS :=
+RS_COMPAT_TRIPLE := aarch64-linux-android
diff --git a/core/clang/TARGET_mips.mk b/core/clang/TARGET_mips.mk
index e70e254..91067f5 100644
--- a/core/clang/TARGET_mips.mk
+++ b/core/clang/TARGET_mips.mk
@@ -3,7 +3,7 @@
 
 CLANG_CONFIG_mips_TARGET_TRIPLE := mipsel-linux-android
 CLANG_CONFIG_mips_TARGET_TOOLCHAIN_PREFIX := \
-  $($(clang_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT)/$(CLANG_CONFIG_mips_TARGET_TRIPLE)/bin
+  $($(clang_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT)/mips64el-linux-android/bin
 
 CLANG_CONFIG_mips_TARGET_EXTRA_ASFLAGS := \
   $(CLANG_CONFIG_EXTRA_ASFLAGS) \
@@ -43,11 +43,13 @@
   $(CLANG_CONFIG_mips_TARGET_EXTRA_CFLAGS)
 
 $(clang_2nd_arch_prefix)CLANG_TARGET_GLOBAL_CPPFLAGS := \
-  $(call $(clang_2nd_arch_prefix)convert-to-clang-flags,$(clang_2nd_arch_prefix)$(TARGET_GLOBAL_CPPFLAGS)) \
+  $(call $(clang_2nd_arch_prefix)convert-to-clang-flags,$($(clang_2nd_arch_prefix)TARGET_GLOBAL_CPPFLAGS)) \
   $(CLANG_CONFIG_mips_TARGET_EXTRA_CPPFLAGS)
 
 $(clang_2nd_arch_prefix)CLANG_TARGET_GLOBAL_LDFLAGS := \
   $(call $(clang_2nd_arch_prefix)convert-to-clang-flags,$($(clang_2nd_arch_prefix)TARGET_GLOBAL_LDFLAGS)) \
   $(CLANG_CONFIG_mips_TARGET_EXTRA_LDFLAGS)
 
-$(clang_2nd_arch_prefix)RS_TRIPLE := mipsel-unknown-linux
+$(clang_2nd_arch_prefix)RS_TRIPLE := armv7-none-linux-gnueabi
+$(clang_2nd_arch_prefix)RS_TRIPLE_CFLAGS :=
+RS_COMPAT_TRIPLE := mipsel-linux-android
diff --git a/core/clang/TARGET_mips64.mk b/core/clang/TARGET_mips64.mk
index b2e536c..ab69aed 100644
--- a/core/clang/TARGET_mips64.mk
+++ b/core/clang/TARGET_mips64.mk
@@ -50,4 +50,6 @@
   $(call convert-to-clang-flags,$(TARGET_GLOBAL_LDFLAGS)) \
   $(CLANG_CONFIG_mips64_TARGET_EXTRA_LDFLAGS)
 
-RS_TRIPLE := mips64el-unknown-linux
+RS_TRIPLE := aarch64-linux-android
+RS_TRIPLE_CFLAGS :=
+RS_COMPAT_TRIPLE := mips64el-linux-android
diff --git a/core/clang/TARGET_x86.mk b/core/clang/TARGET_x86.mk
index 70cb252..60ee06f 100644
--- a/core/clang/TARGET_x86.mk
+++ b/core/clang/TARGET_x86.mk
@@ -52,4 +52,6 @@
   $(call $(clang_2nd_arch_prefix)convert-to-clang-flags,$($(clang_2nd_arch_prefix)TARGET_GLOBAL_LDFLAGS)) \
   $(CLANG_CONFIG_x86_TARGET_EXTRA_LDFLAGS)
 
-$(clang_2nd_arch_prefix)RS_TRIPLE := i686-unknown-linux
+$(clang_2nd_arch_prefix)RS_TRIPLE := armv7-none-linux-gnueabi
+$(clang_2nd_arch_prefix)RS_TRIPLE_CFLAGS := -D__i386__
+RS_COMPAT_TRIPLE := i686-linux-android
diff --git a/core/clang/TARGET_x86_64.mk b/core/clang/TARGET_x86_64.mk
index 14944bd..5561e42 100644
--- a/core/clang/TARGET_x86_64.mk
+++ b/core/clang/TARGET_x86_64.mk
@@ -50,4 +50,6 @@
   $(call convert-to-clang-flags,$(TARGET_GLOBAL_LDFLAGS)) \
   $(CLANG_CONFIG_x86_64_TARGET_EXTRA_LDFLAGS)
 
-RS_TRIPLE := x86_64-unknown-linux
+RS_TRIPLE := aarch64-linux-android
+RS_TRIPLE_CFLAGS := -D__x86_64__
+RS_COMPAT_TRIPLE := x86_64-linux-android
diff --git a/core/clang/config.mk b/core/clang/config.mk
index f50a0cb..5b2aea5 100644
--- a/core/clang/config.mk
+++ b/core/clang/config.mk
@@ -14,9 +14,20 @@
 LLVM_AS := $(LLVM_PREBUILTS_PATH)/llvm-as$(BUILD_EXECUTABLE_SUFFIX)
 LLVM_LINK := $(LLVM_PREBUILTS_PATH)/llvm-link$(BUILD_EXECUTABLE_SUFFIX)
 
-CLANG_TBLGEN := $(HOST_OUT_EXECUTABLES)/clang-tblgen$(BUILD_EXECUTABLE_SUFFIX)
-LLVM_TBLGEN := $(HOST_OUT_EXECUTABLES)/llvm-tblgen$(BUILD_EXECUTABLE_SUFFIX)
+CLANG_TBLGEN := $(BUILD_OUT_EXECUTABLES)/clang-tblgen$(BUILD_EXECUTABLE_SUFFIX)
+LLVM_TBLGEN := $(BUILD_OUT_EXECUTABLES)/llvm-tblgen$(BUILD_EXECUTABLE_SUFFIX)
 
+# The C/C++ compiler can be wrapped by setting the CC/CXX_WRAPPER vars.
+ifdef CC_WRAPPER
+  ifneq ($(CC_WRAPPER),$(firstword $(CLANG)))
+    CLANG := $(CC_WRAPPER) $(CLANG)
+  endif
+endif
+ifdef CXX_WRAPPER
+  ifneq ($(CXX_WRAPPER),$(firstword $(CLANG_CXX)))
+    CLANG_CXX := $(CXX_WRAPPER) $(CLANG_CXX)
+  endif
+endif
 
 # Clang flags for all host or target rules
 CLANG_CONFIG_EXTRA_ASFLAGS :=
@@ -81,9 +92,10 @@
 CLANG_CONFIG_EXTRA_TARGET_C_INCLUDES := $(LLVM_PREBUILTS_HEADER_PATH) $(TARGET_OUT_HEADERS)/clang
 
 # Address sanitizer clang config
-ADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS := -fsanitize=address
+ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan_$(TARGET_ARCH)_android
+ADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS := -fsanitize=address -fno-omit-frame-pointer
 ADDRESS_SANITIZER_CONFIG_EXTRA_LDFLAGS := -Wl,-u,__asan_preinit
-ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES := libdl libasan_preload
+ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES := libdl $(ADDRESS_SANITIZER_RUNTIME_LIBRARY)
 ADDRESS_SANITIZER_CONFIG_EXTRA_STATIC_LIBRARIES := libasan
 
 # This allows us to use the superset of functionality that compiler-rt
diff --git a/core/clang/mips.mk b/core/clang/mips.mk
index cef7823..08daf40 100644
--- a/core/clang/mips.mk
+++ b/core/clang/mips.mk
@@ -7,18 +7,10 @@
 # Include common unknown flags
 CLANG_CONFIG_mips_UNKNOWN_CFLAGS := \
   $(CLANG_CONFIG_UNKNOWN_CFLAGS) \
-  -EL \
-  -mips32 \
-  -mips32r2 \
-  -mhard-float \
   -fno-strict-volatile-bitfields \
   -fgcse-after-reload \
   -frerun-cse-after-loop \
   -frename-registers \
-  -march=mips32r2 \
-  -mtune=mips32r2 \
-  -march=mips32 \
-  -mtune=mips32 \
   -msynci \
   -mno-fused-madd
 
diff --git a/core/clang/mips64.mk b/core/clang/mips64.mk
index 9d1117b..612175c 100644
--- a/core/clang/mips64.mk
+++ b/core/clang/mips64.mk
@@ -1,7 +1,5 @@
 # Clang flags for mips64 arch, target or host.
 
-$(warning Untested mips64 clang flags, fix me!)
-
 CLANG_CONFIG_mips64_EXTRA_ASFLAGS :=
 CLANG_CONFIG_mips64_EXTRA_CFLAGS :=
 CLANG_CONFIG_mips64_EXTRA_LDFLAGS :=
@@ -9,18 +7,10 @@
 # Include common unknown flags
 CLANG_CONFIG_mips64_UNKNOWN_CFLAGS := \
   $(CLANG_CONFIG_UNKNOWN_CFLAGS) \
-  -EL \
-  -mips32 \
-  -mips32r2 \
-  -mhard-float \
   -fno-strict-volatile-bitfields \
   -fgcse-after-reload \
   -frerun-cse-after-loop \
   -frename-registers \
-  -march=mips32r2 \
-  -mtune=mips32r2 \
-  -march=mips32 \
-  -mtune=mips32 \
   -msynci \
   -mno-fused-madd
 
diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk
index 0932aa1..1bada38 100644
--- a/core/cleanbuild.mk
+++ b/core/cleanbuild.mk
@@ -214,6 +214,8 @@
 	$(PRODUCT_OUT)/recovery \
 	$(PRODUCT_OUT)/root \
 	$(PRODUCT_OUT)/system \
+	$(PRODUCT_OUT)/vendor \
+	$(PRODUCT_OUT)/oem \
 	$(PRODUCT_OUT)/dex_bootjars \
 	$(PRODUCT_OUT)/obj/JAVA_LIBRARIES \
 	$(PRODUCT_OUT)/obj/FAKE \
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index e3e8e1d..f23c4a6 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -23,6 +23,7 @@
 LOCAL_OVERRIDES_PACKAGES:=
 LOCAL_EXPORT_PACKAGE_RESOURCES:=
 LOCAL_MANIFEST_PACKAGE_NAME:=
+LOCAL_PACKAGE_SPLITS:=
 LOCAL_REQUIRED_MODULES:=
 LOCAL_ACP_UNAVAILABLE:=
 LOCAL_MODULE_TAGS:=
@@ -92,6 +93,7 @@
 LOCAL_JAR_MANIFEST:=
 LOCAL_INSTRUMENTATION_FOR:=
 LOCAL_APK_LIBRARIES:=
+LOCAL_RES_LIBRARIES:=
 LOCAL_MANIFEST_INSTRUMENTATION_FOR:=
 LOCAL_AIDL_INCLUDES:=
 LOCAL_JARJAR_RULES:=
@@ -126,6 +128,7 @@
 LOCAL_PROTO_JAVA_OUTPUT_PARAMS:=
 LOCAL_NO_CRT:=
 LOCAL_PROPRIETARY_MODULE:=
+LOCAL_OEM_MODULE:=
 LOCAL_PRIVILEGED_MODULE:=
 LOCAL_MODULE_OWNER:=
 LOCAL_CTS_TEST_PACKAGE:=
@@ -134,6 +137,7 @@
 LOCAL_ADDRESS_SANITIZER:=
 LOCAL_JAR_EXCLUDE_FILES:=
 LOCAL_JAR_PACKAGES:=
+LOCAL_JAR_EXCLUDE_PACKAGES:=
 LOCAL_LINT_FLAGS:=
 LOCAL_SOURCE_FILES_ALL_GENERATED:= # '',true
 # Don't delete the META_INF dir when merging static Java libraries.
@@ -143,6 +147,7 @@
 LOCAL_POST_INSTALL_CMD:=
 LOCAL_DIST_BUNDLED_BINARIES:=
 LOCAL_HAL_STATIC_LIBRARIES:=
+LOCAL_RMTYPEDEFS:=
 LOCAL_NO_SYNTAX_CHECK:=
 LOCAL_NO_STATIC_ANALYZER:=
 LOCAL_32_BIT_ONLY:= # '',true
diff --git a/core/combo/HOST_linux-x86.mk b/core/combo/HOST_linux-x86.mk
index c931937..3ca7443 100644
--- a/core/combo/HOST_linux-x86.mk
+++ b/core/combo/HOST_linux-x86.mk
@@ -31,7 +31,7 @@
 $(combo_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG := prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.11-4.6/
 
 # We expect SSE3 floating point math.
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -mstackrealign -msse3 -mfpmath=sse -m32 -Wa,--noexecstack
+$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -mstackrealign -msse3 -mfpmath=sse -m32 -Wa,--noexecstack -march=prescott
 $(combo_2nd_arch_prefix)HOST_GLOBAL_LDFLAGS += -m32 -Wl,-z,noexecstack
 
 ifneq ($(strip $(BUILD_HOST_static)),)
diff --git a/core/combo/HOST_windows-x86.mk b/core/combo/HOST_windows-x86.mk
index 4d871d8..fdb72a7 100644
--- a/core/combo/HOST_windows-x86.mk
+++ b/core/combo/HOST_windows-x86.mk
@@ -28,9 +28,14 @@
 HOST_ACP_UNAVAILABLE := true
 TOOLS_EXE_SUFFIX :=
 $(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -DUSE_MINGW
-TOOLS_PREFIX := /usr/bin/i586-mingw32msvc-
-$(combo_2nd_arch_prefix)HOST_C_INCLUDES += /usr/lib/gcc/i586-mingw32msvc/3.4.4/include
-$(combo_2nd_arch_prefix)HOST_GLOBAL_LD_DIRS += -L/usr/i586-mingw32msvc/lib
+$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -Wno-unused-parameter
+$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += --sysroot=prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32
+$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -m32
+$(combo_2nd_arch_prefix)HOST_GLOBAL_LDFLAGS += -m32
+TOOLS_PREFIX := prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/bin/x86_64-w64-mingw32-
+$(combo_2nd_arch_prefix)HOST_C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/include
+$(combo_2nd_arch_prefix)HOST_C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/lib/gcc/x86_64-w64-mingw32/4.8.3/include
+$(combo_2nd_arch_prefix)HOST_GLOBAL_LD_DIRS += -Lprebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/lib32
 endif # USE_MINGW
 endif # Linux
 
diff --git a/core/combo/TARGET_linux-arm.mk b/core/combo/TARGET_linux-arm.mk
index 68737a3..df81cd5 100644
--- a/core/combo/TARGET_linux-arm.mk
+++ b/core/combo/TARGET_linux-arm.mk
@@ -49,6 +49,7 @@
 endif
 
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
+include $(BUILD_SYSTEM)/combo/fdo.mk
 
 # You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
 ifeq ($(strip $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)),)
@@ -94,7 +95,7 @@
 android_config_h := $(call select-android-config-h,linux-arm)
 
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += \
-			-msoft-float -fpic -fPIE \
+			-msoft-float \
 			-ffunction-sections \
 			-fdata-sections \
 			-funwind-tables \
@@ -114,7 +115,7 @@
 # into no-op in some builds while mesg is defined earlier. So we explicitly
 # disable "-Wunused-but-set-variable" here.
 ifneq ($(filter 4.6 4.6.% 4.7 4.7.% 4.8, $($(combo_2nd_arch_prefix)TARGET_GCC_VERSION)),)
-$(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += -Wno-unused-but-set-variable -fno-builtin-sin \
+$(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += -fno-builtin-sin \
 			-fno-strict-volatile-bitfields
 endif
 
@@ -164,39 +165,8 @@
         $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) -print-libgcc-file-name)
 $(combo_2nd_arch_prefix)TARGET_LIBATOMIC := $(shell $($(combo_2nd_arch_prefix)TARGET_CC) \
         $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) -print-file-name=libatomic.a)
-target_libgcov := $(shell $($(combo_2nd_arch_prefix)TARGET_CC) $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) \
-        -print-file-name=libgcov.a)
 endif
 
-# Define FDO (Feedback Directed Optimization) options.
-
-$(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS:=
-$(combo_2nd_arch_prefix)TARGET_FDO_LIB:=
-
-ifneq ($(strip $(BUILD_FDO_INSTRUMENT)),)
-  # Set BUILD_FDO_INSTRUMENT=true to turn on FDO instrumentation.
-  # The profile will be generated on /data/local/tmp/profile on the device.
-  $(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS := -fprofile-generate=/data/local/tmp/profile -DANDROID_FDO
-  $(combo_2nd_arch_prefix)TARGET_FDO_LIB := $(target_libgcov)
-else
-  # If BUILD_FDO_INSTRUMENT is turned off, then consider doing the FDO optimizations.
-  # Set TARGET_FDO_PROFILE_PATH to set a custom profile directory for your build.
-  ifeq ($(strip $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH)),)
-    $(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH := fdo/profiles/$(TARGET_$(combo_2nd_arch_prefix)ARCH)/$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT)
-  else
-    ifeq ($(strip $(wildcard $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH))),)
-      $(warning Custom $(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH supplied, but directory does not exist. Turn off FDO.)
-    endif
-  endif
-
-  # If the FDO profile directory can't be found, then FDO is off.
-  ifneq ($(strip $(wildcard $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH))),)
-    $(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS := -fprofile-use=$($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH) -DANDROID_FDO
-    $(combo_2nd_arch_prefix)TARGET_FDO_LIB := $(target_libgcov)
-  endif
-endif
-
-
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
 KERNEL_HEADERS_ARCH   := $(libc_root)/kernel/uapi/asm-$(TARGET_$(combo_2nd_arch_prefix)ARCH)
 KERNEL_HEADERS := $(KERNEL_HEADERS_COMMON) $(KERNEL_HEADERS_ARCH)
@@ -226,7 +196,7 @@
 $(hide) $(PRIVATE_CXX) \
 	-nostdlib -Wl,-soname,$(notdir $@) \
 	-Wl,--gc-sections \
-	-shared \
+	$(if $(filter true,$(PRIVATE_CLANG)),-shared,-Wl,-shared) \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
 	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTBEGIN_SO_O)) \
 	$(PRIVATE_ALL_OBJECTS) \
@@ -237,18 +207,18 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	$(PRIVATE_LDFLAGS) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O))
+	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O)) \
+	$(PRIVATE_LDLIBS)
 endef
 
 define $(combo_2nd_arch_prefix)transform-o-to-executable-inner
-$(hide) $(PRIVATE_CXX) -nostdlib -Bdynamic -fPIE -pie \
+$(hide) $(PRIVATE_CXX) -nostdlib -Bdynamic -pie \
 	-Wl,-dynamic-linker,/system/bin/linker \
 	-Wl,--gc-sections \
 	-Wl,-z,nocopyreloc \
@@ -263,14 +233,14 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	$(PRIVATE_LDFLAGS) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O))
+	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O)) \
+	$(PRIVATE_LDLIBS)
 endef
 
 define $(combo_2nd_arch_prefix)transform-o-to-static-executable-inner
@@ -289,7 +259,6 @@
 	-Wl,--start-group \
 	$(call normalize-target-libraries,$(filter %libc.a,$(PRIVATE_ALL_STATIC_LIBRARIES))) \
 	$(call normalize-target-libraries,$(filter %libc_nomalloc.a,$(PRIVATE_ALL_STATIC_LIBRARIES))) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
 	-Wl,--end-group \
diff --git a/core/combo/TARGET_linux-arm64.mk b/core/combo/TARGET_linux-arm64.mk
index 02c4b99..07d3984 100644
--- a/core/combo/TARGET_linux-arm64.mk
+++ b/core/combo/TARGET_linux-arm64.mk
@@ -35,7 +35,7 @@
 endif
 
 # Decouple NDK library selection with platform compiler version
-TARGET_NDK_GCC_VERSION := 4.8
+TARGET_NDK_GCC_VERSION := 4.9
 
 ifeq ($(strip $(TARGET_GCC_VERSION_EXP)),)
 TARGET_GCC_VERSION := 4.9
@@ -49,6 +49,7 @@
 endif
 
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
+include $(BUILD_SYSTEM)/combo/fdo.mk
 
 # You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
 ifeq ($(strip $(TARGET_TOOLS_PREFIX)),)
@@ -72,7 +73,6 @@
 android_config_h := $(call select-android-config-h,linux-arm64)
 
 TARGET_GLOBAL_CFLAGS += \
-			-fpic -fPIE \
 			-fstack-protector \
 			-ffunction-sections \
 			-fdata-sections \
@@ -162,7 +162,7 @@
 $(hide) $(PRIVATE_CXX) \
 	-nostdlib -Wl,-soname,$(notdir $@) \
 	-Wl,--gc-sections \
-	-shared \
+	$(if $(filter true,$(PRIVATE_CLANG)),-shared,-Wl,-shared) \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
 	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTBEGIN_SO_O)) \
 	$(PRIVATE_ALL_OBJECTS) \
@@ -173,7 +173,6 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
@@ -185,7 +184,7 @@
 endef
 
 define transform-o-to-executable-inner
-$(hide) $(PRIVATE_CXX) -nostdlib -Bdynamic -fPIE -pie \
+$(hide) $(PRIVATE_CXX) -nostdlib -Bdynamic -pie \
 	-Wl,-dynamic-linker,/system/bin/linker64 \
 	-Wl,--gc-sections \
 	-Wl,-z,nocopyreloc \
@@ -200,7 +199,6 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
@@ -227,7 +225,6 @@
 	-Wl,--start-group \
 	$(call normalize-target-libraries,$(filter %libc.a,$(PRIVATE_ALL_STATIC_LIBRARIES))) \
 	$(call normalize-target-libraries,$(filter %libc_nomalloc.a,$(PRIVATE_ALL_STATIC_LIBRARIES))) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
 	-Wl,--end-group \
diff --git a/core/combo/TARGET_linux-mips.mk b/core/combo/TARGET_linux-mips.mk
index e505a6b..995e63c 100644
--- a/core/combo/TARGET_linux-mips.mk
+++ b/core/combo/TARGET_linux-mips.mk
@@ -38,7 +38,7 @@
 $(combo_2nd_arch_prefix)TARGET_NDK_GCC_VERSION := 4.8
 
 ifeq ($(strip $(TARGET_GCC_VERSION_EXP)),)
-$(combo_2nd_arch_prefix)TARGET_GCC_VERSION := 4.8
+$(combo_2nd_arch_prefix)TARGET_GCC_VERSION := 4.9
 else
 $(combo_2nd_arch_prefix)TARGET_GCC_VERSION := $(TARGET_GCC_VERSION_EXP)
 endif
@@ -49,11 +49,12 @@
 endif
 
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
+include $(BUILD_SYSTEM)/combo/fdo.mk
 
 # You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
 ifeq ($(strip $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)),)
-$(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT := prebuilts/gcc/$(HOST_PREBUILT_TAG)/mips/mipsel-linux-android-$($(combo_2nd_arch_prefix)TARGET_GCC_VERSION)
-$(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX := $($(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT)/bin/mipsel-linux-android-
+$(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT := prebuilts/gcc/$(HOST_PREBUILT_TAG)/mips/mips64el-linux-android-$($(combo_2nd_arch_prefix)TARGET_GCC_VERSION)
+$(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX := $($(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT)/bin/mips64el-linux-android-
 endif
 
 $(combo_2nd_arch_prefix)TARGET_CC := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)gcc$(HOST_EXECUTABLE_SUFFIX)
@@ -83,7 +84,6 @@
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += \
 			$(TARGET_mips_CFLAGS) \
 			-U__unix -U__unix__ -Umips \
-			-fpic -fPIE\
 			-ffunction-sections \
 			-fdata-sections \
 			-funwind-tables \
@@ -96,24 +96,6 @@
 			-include $(android_config_h) \
 			-I $(dir $(android_config_h))
 
-# This warning causes dalvik not to build with gcc 4.6+ and -Werror.
-# We cannot turn it off blindly since the option is not available
-# in gcc-4.4.x.
-ifneq ($(filter 4.6 4.6.% 4.7 4.7.% 4.8, $($(combo_2nd_arch_prefix)TARGET_GCC_VERSION)),)
-$(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += -Wno-unused-but-set-variable \
-                        -fno-strict-volatile-bitfields
-endif
-
-# This is to avoid the dreaded warning compiler message:
-#   note: the mangling of 'va_list' has changed in GCC 4.4
-#
-# The fact that the mangling changed does not affect the NDK ABI
-# very fortunately (since none of the exposed APIs used va_list
-# in their exported C++ functions). Also, GCC 4.5 has already
-# removed the warning from the compiler.
-#
-$(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += -Wno-psabi
-
 ifneq ($(ARCH_MIPS_PAGE_SHIFT),)
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += -DPAGE_SHIFT=$(ARCH_MIPS_PAGE_SHIFT)
 endif
@@ -155,39 +137,8 @@
 ifneq ($(LIBGCC_EH),libgcc_eh.a)
   $(combo_2nd_arch_prefix)TARGET_LIBGCC += $(LIBGCC_EH)
 endif
-target_libgcov := $(shell $($(combo_2nd_arch_prefix)TARGET_CC) $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) \
-        --print-file-name=libgcov.a)
 endif
 
-# Define FDO (Feedback Directed Optimization) options.
-
-$(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS:=
-$(combo_2nd_arch_prefix)TARGET_FDO_LIB:=
-
-ifneq ($(strip $(BUILD_FDO_INSTRUMENT)),)
-  # Set BUILD_FDO_INSTRUMENT=true to turn on FDO instrumentation.
-  # The profile will be generated on /data/local/tmp/profile on the device.
-  $(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS := -fprofile-generate=/data/local/tmp/profile -DANDROID_FDO
-  $(combo_2nd_arch_prefix)TARGET_FDO_LIB := $(target_libgcov)
-else
-  # If BUILD_FDO_INSTRUMENT is turned off, then consider doing the FDO optimizations.
-  # Set TARGET_FDO_PROFILE_PATH to set a custom profile directory for your build.
-  ifeq ($(strip $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH)),)
-    $(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH := fdo/profiles/$(TARGET_$(combo_2nd_arch_prefix)ARCH)/$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT)
-  else
-    ifeq ($(strip $(wildcard $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH))),)
-      $(warning Custom $(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH supplied, but directory does not exist. Turn off FDO.)
-    endif
-  endif
-
-  # If the FDO profile directory can't be found, then FDO is off.
-  ifneq ($(strip $(wildcard $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH))),)
-    $(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS := -fprofile-use=$($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH) -DANDROID_FDO
-    $(combo_2nd_arch_prefix)TARGET_FDO_LIB := $(target_libgcov)
-  endif
-endif
-
-
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
 KERNEL_HEADERS_ARCH   := $(libc_root)/kernel/uapi/asm-mips # mips covers both mips and mips64.
 KERNEL_HEADERS := $(KERNEL_HEADERS_COMMON) $(KERNEL_HEADERS_ARCH)
@@ -217,7 +168,7 @@
 $(hide) $(PRIVATE_CXX) \
 	-nostdlib -Wl,-soname,$(notdir $@) \
 	-Wl,--gc-sections \
-	-shared \
+	$(if $(filter true,$(PRIVATE_CLANG)),-shared,-Wl,-shared) \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
 	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTBEGIN_SO_O)) \
 	$(PRIVATE_ALL_OBJECTS) \
@@ -228,18 +179,18 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	$(PRIVATE_LDFLAGS) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O))
+	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O)) \
+	$(PRIVATE_LDLIBS)
 endef
 
 define $(combo_2nd_arch_prefix)transform-o-to-executable-inner
-$(hide) $(PRIVATE_CXX) -nostdlib -Bdynamic -fPIE -pie \
+$(hide) $(PRIVATE_CXX) -nostdlib -Bdynamic -pie \
 	-Wl,-dynamic-linker,/system/bin/linker \
 	-Wl,--gc-sections \
 	-Wl,-z,nocopyreloc \
@@ -254,14 +205,14 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	$(PRIVATE_LDFLAGS) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O))
+	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O)) \
+	$(PRIVATE_LDLIBS)
 endef
 
 define $(combo_2nd_arch_prefix)transform-o-to-static-executable-inner
@@ -280,7 +231,6 @@
 	-Wl,--start-group \
 	$(call normalize-target-libraries,$(filter %libc.a,$(PRIVATE_ALL_STATIC_LIBRARIES))) \
 	$(call normalize-target-libraries,$(filter %libc_nomalloc.a,$(PRIVATE_ALL_STATIC_LIBRARIES))) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
 	-Wl,--end-group \
diff --git a/core/combo/TARGET_linux-mips64.mk b/core/combo/TARGET_linux-mips64.mk
index aa456ef..4511b47 100644
--- a/core/combo/TARGET_linux-mips64.mk
+++ b/core/combo/TARGET_linux-mips64.mk
@@ -31,14 +31,14 @@
 # version.
 #
 ifeq ($(strip $(TARGET_ARCH_VARIANT)),)
-TARGET_ARCH_VARIANT := mips64r2
+TARGET_ARCH_VARIANT := mips64r6
 endif
 
 # Decouple NDK library selection with platform compiler version
 TARGET_NDK_GCC_VERSION := 4.8
 
 ifeq ($(strip $(TARGET_GCC_VERSION_EXP)),)
-TARGET_GCC_VERSION := 4.8
+TARGET_GCC_VERSION := 4.9
 else
 TARGET_GCC_VERSION := $(TARGET_GCC_VERSION_EXP)
 endif
@@ -48,10 +48,8 @@
 $(error Unknown MIPS architecture variant: $(TARGET_ARCH_VARIANT))
 endif
 
-# TODO: Enable Clang when its mips64 prebuilt is added
-WITHOUT_TARGET_CLANG := true
-
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
+include $(BUILD_SYSTEM)/combo/fdo.mk
 
 # You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
 ifeq ($(strip $(TARGET_TOOLS_PREFIX)),)
@@ -86,7 +84,6 @@
 TARGET_GLOBAL_CFLAGS += \
 			$(TARGET_mips_CFLAGS) \
 			-U__unix -U__unix__ -Umips \
-			-fpic -fPIE\
 			-ffunction-sections \
 			-fdata-sections \
 			-funwind-tables \
@@ -99,24 +96,6 @@
 			-include $(android_config_h) \
 			-I $(dir $(android_config_h))
 
-# This warning causes dalvik not to build with gcc 4.6+ and -Werror.
-# We cannot turn it off blindly since the option is not available
-# in gcc-4.4.x.
-ifneq ($(filter 4.6 4.6.% 4.7 4.7.% 4.8, $(TARGET_GCC_VERSION)),)
-TARGET_GLOBAL_CFLAGS += -Wno-unused-but-set-variable \
-                        -fno-strict-volatile-bitfields
-endif
-
-# This is to avoid the dreaded warning compiler message:
-#   note: the mangling of 'va_list' has changed in GCC 4.4
-#
-# The fact that the mangling changed does not affect the NDK ABI
-# very fortunately (since none of the exposed APIs used va_list
-# in their exported C++ functions). Also, GCC 4.5 has already
-# removed the warning from the compiler.
-#
-TARGET_GLOBAL_CFLAGS += -Wno-psabi
-
 ifneq ($(ARCH_MIPS_PAGE_SHIFT),)
 TARGET_GLOBAL_CFLAGS += -DPAGE_SHIFT=$(ARCH_MIPS_PAGE_SHIFT)
 endif
@@ -159,39 +138,8 @@
 ifneq ($(LIBGCC_EH),libgcc_eh.a)
   TARGET_LIBGCC += $(LIBGCC_EH)
 endif
-target_libgcov := $(shell $(TARGET_CC) $(TARGET_GLOBAL_CFLAGS) \
-        --print-file-name=libgcov.a)
 endif
 
-# Define FDO (Feedback Directed Optimization) options.
-
-TARGET_FDO_CFLAGS:=
-TARGET_FDO_LIB:=
-
-ifneq ($(strip $(BUILD_FDO_INSTRUMENT)),)
-  # Set BUILD_FDO_INSTRUMENT=true to turn on FDO instrumentation.
-  # The profile will be generated on /data/local/tmp/profile on the device.
-  TARGET_FDO_CFLAGS := -fprofile-generate=/data/local/tmp/profile -DANDROID_FDO
-  TARGET_FDO_LIB := $(target_libgcov)
-else
-  # If BUILD_FDO_INSTRUMENT is turned off, then consider doing the FDO optimizations.
-  # Set TARGET_FDO_PROFILE_PATH to set a custom profile directory for your build.
-  ifeq ($(strip $(TARGET_FDO_PROFILE_PATH)),)
-    TARGET_FDO_PROFILE_PATH := fdo/profiles/$(TARGET_ARCH)/$(TARGET_ARCH_VARIANT)
-  else
-    ifeq ($(strip $(wildcard $(TARGET_FDO_PROFILE_PATH))),)
-      $(warning Custom TARGET_FDO_PROFILE_PATH supplied, but directory does not exist. Turn off FDO.)
-    endif
-  endif
-
-  # If the FDO profile directory can't be found, then FDO is off.
-  ifneq ($(strip $(wildcard $(TARGET_FDO_PROFILE_PATH))),)
-    TARGET_FDO_CFLAGS := -fprofile-use=$(TARGET_FDO_PROFILE_PATH) -DANDROID_FDO
-    TARGET_FDO_LIB := $(target_libgcov)
-  endif
-endif
-
-
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
 KERNEL_HEADERS_ARCH   := $(libc_root)/kernel/uapi/asm-mips
 # TODO: perhaps use $(libc_root)/kernel/uapi/asm-$(TARGET_ARCH) instead of asm-mips ?
@@ -224,7 +172,7 @@
 $(hide) $(PRIVATE_CXX) \
 	-nostdlib -Wl,-soname,$(notdir $@) \
 	-Wl,--gc-sections \
-	-shared \
+	$(if $(filter true,$(PRIVATE_CLANG)),-shared,-Wl,-shared) \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
 	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTBEGIN_SO_O)) \
 	$(PRIVATE_ALL_OBJECTS) \
@@ -235,7 +183,6 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
@@ -247,7 +194,7 @@
 endef
 
 define transform-o-to-executable-inner
-$(hide) $(PRIVATE_CXX) -nostdlib -Bdynamic -fPIE -pie \
+$(hide) $(PRIVATE_CXX) -nostdlib -Bdynamic -pie \
 	-Wl,-dynamic-linker,/system/bin/linker64 \
 	-Wl,--gc-sections \
 	-Wl,-z,nocopyreloc \
@@ -262,7 +209,6 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
@@ -289,7 +235,6 @@
 	-Wl,--start-group \
 	$(call normalize-target-libraries,$(filter %libc.a,$(PRIVATE_ALL_STATIC_LIBRARIES))) \
 	$(call normalize-target-libraries,$(filter %libc_nomalloc.a,$(PRIVATE_ALL_STATIC_LIBRARIES))) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
 	-Wl,--end-group \
diff --git a/core/combo/TARGET_linux-x86.mk b/core/combo/TARGET_linux-x86.mk
index 4c00891..0af3948 100644
--- a/core/combo/TARGET_linux-x86.mk
+++ b/core/combo/TARGET_linux-x86.mk
@@ -41,7 +41,7 @@
 endif
 
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
-
+include $(BUILD_SYSTEM)/combo/fdo.mk
 
 # You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
 ifeq ($(strip $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)),)
@@ -62,8 +62,6 @@
 	$(shell $($(combo_2nd_arch_prefix)TARGET_CC) -m32 -print-file-name=libgcc.a)
 $(combo_2nd_arch_prefix)TARGET_LIBATOMIC := \
 	$(shell $($(combo_2nd_arch_prefix)TARGET_CC) -m32 -print-file-name=libatomic.a)
-target_libgcov := $(shell $($(combo_2nd_arch_prefix)TARGET_CC) $($(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS) \
-	-print-file-name=libgcov.a)
 endif
 
 $(combo_2nd_arch_prefix)TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
@@ -72,34 +70,6 @@
 libm_root := bionic/libm
 libstdc++_root := bionic/libstdc++
 
-# Define FDO (Feedback Directed Optimization) options.
-
-$(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS:=
-$(combo_2nd_arch_prefix)TARGET_FDO_LIB:=
-
-ifneq ($(strip $(BUILD_FDO_INSTRUMENT)),)
-  # Set BUILD_FDO_INSTRUMENT=true to turn on FDO instrumentation.
-  # The profile will be generated on /data/local/tmp/profile on the device.
-  $(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS := -fprofile-generate=/data/local/tmp/profile -DANDROID_FDO
-  $(combo_2nd_arch_prefix)TARGET_FDO_LIB := $(target_libgcov)
-else
-  # If BUILD_FDO_INSTRUMENT is turned off, then consider doing the FDO optimizations.
-  # Set TARGET_FDO_PROFILE_PATH to set a custom profile directory for your build.
-  ifeq ($(strip $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH)),)
-    $(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH := fdo/profiles/$(TARGET_$(combo_2nd_arch_prefix)ARCH)/$(TARGET_$(combo_2nd_arch_prefix)ARCH_VARIANT)
-  else
-    ifeq ($(strip $(wildcard $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH))),)
-      $(warning Custom $(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH supplied, but directory does not exist. Turn off FDO.)
-    endif
-  endif
-
-  # If the FDO profile directory can't be found, then FDO is off.
-  ifneq ($(strip $(wildcard $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH))),)
-    $(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS := -fprofile-use=$($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH) -DANDROID_FDO
-    $(combo_2nd_arch_prefix)TARGET_FDO_LIB := $(target_libgcov)
-  endif
-endif
-
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
 KERNEL_HEADERS_ARCH   := $(libc_root)/kernel/uapi/asm-x86 # x86 covers both x86 and x86_64.
 KERNEL_HEADERS := $(KERNEL_HEADERS_COMMON) $(KERNEL_HEADERS_ARCH)
@@ -112,18 +82,15 @@
 			-Werror=format-security \
 			-D_FORTIFY_SOURCE=2 \
 			-Wstrict-aliasing=2 \
-			-fPIC -fPIE \
 			-ffunction-sections \
 			-finline-functions \
 			-finline-limit=300 \
-			-fno-inline-functions-called-once \
 			-fno-short-enums \
 			-fstrict-aliasing \
 			-funswitch-loops \
 			-funwind-tables \
 			-fstack-protector \
 			-m32 \
-			-msse2 \
 			-no-canonical-prefixes \
 			-fno-canonical-system-headers \
 			-include $(android_config_h) \
@@ -182,7 +149,7 @@
 $(hide) $(PRIVATE_CXX) \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	 -nostdlib -Wl,-soname,$(notdir $@) \
-	 -shared \
+	$(if $(filter true,$(PRIVATE_CLANG)),-shared,-Wl,-shared) \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
 	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTBEGIN_SO_O)) \
 	$(PRIVATE_ALL_OBJECTS) \
@@ -193,13 +160,13 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_LDFLAGS) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O))
+	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O)) \
+	$(PRIVATE_LDLIBS)
 endef
 
 define $(combo_2nd_arch_prefix)transform-o-to-executable-inner
@@ -207,7 +174,7 @@
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	-nostdlib -Bdynamic \
 	-Wl,-z,nocopyreloc \
-	-fPIE -pie \
+	-pie \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
 	-Wl,-rpath-link=$(PRIVATE_TARGET_OUT_INTERMEDIATE_LIBRARIES) \
 	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTBEGIN_DYNAMIC_O)) \
@@ -219,13 +186,13 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_LDFLAGS) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O))
+	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O)) \
+	$(PRIVATE_LDLIBS)
 endef
 
 define $(combo_2nd_arch_prefix)transform-o-to-static-executable-inner
@@ -242,7 +209,6 @@
 	-Wl,--no-whole-archive \
 	-Wl,--start-group \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
 	-Wl,--end-group \
diff --git a/core/combo/TARGET_linux-x86_64.mk b/core/combo/TARGET_linux-x86_64.mk
index f6a9fc8..33d6a56 100644
--- a/core/combo/TARGET_linux-x86_64.mk
+++ b/core/combo/TARGET_linux-x86_64.mk
@@ -41,7 +41,7 @@
 endif
 
 include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
-
+include $(BUILD_SYSTEM)/combo/fdo.mk
 
 # You can set TARGET_TOOLS_PREFIX to get gcc from somewhere else
 ifeq ($(strip $(TARGET_TOOLS_PREFIX)),)
@@ -62,8 +62,6 @@
 	$(shell $(TARGET_CC) -m64 -print-file-name=libgcc.a)
 TARGET_LIBATOMIC := \
 	$(shell $(TARGET_CC) -m64 -print-file-name=libatomic.a)
-target_libgcov := $(shell $(TARGET_CC) $(TARGET_GLOBAL_CFLAGS) \
-	-print-file-name=libgcov.a)
 endif
 
 TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
@@ -72,34 +70,6 @@
 libm_root := bionic/libm
 libstdc++_root := bionic/libstdc++
 
-# Define FDO (Feedback Directed Optimization) options.
-
-TARGET_FDO_CFLAGS:=
-TARGET_FDO_LIB:=
-
-ifneq ($(strip $(BUILD_FDO_INSTRUMENT)),)
-  # Set BUILD_FDO_INSTRUMENT=true to turn on FDO instrumentation.
-  # The profile will be generated on /data/local/tmp/profile on the device.
-  TARGET_FDO_CFLAGS := -fprofile-generate=/data/local/tmp/profile -DANDROID_FDO
-  TARGET_FDO_LIB := $(target_libgcov)
-else
-  # If BUILD_FDO_INSTRUMENT is turned off, then consider doing the FDO optimizations.
-  # Set TARGET_FDO_PROFILE_PATH to set a custom profile directory for your build.
-  ifeq ($(strip $(TARGET_FDO_PROFILE_PATH)),)
-    TARGET_FDO_PROFILE_PATH := fdo/profiles/$(TARGET_ARCH)/$(TARGET_ARCH_VARIANT)
-  else
-    ifeq ($(strip $(wildcard $(TARGET_FDO_PROFILE_PATH))),)
-      $(warning Custom TARGET_FDO_PROFILE_PATH supplied, but directory does not exist. Turn off FDO.)
-    endif
-  endif
-
-  # If the FDO profile directory can't be found, then FDO is off.
-  ifneq ($(strip $(wildcard $(TARGET_FDO_PROFILE_PATH))),)
-    TARGET_FDO_CFLAGS := -fprofile-use=$(TARGET_FDO_PROFILE_PATH) -DANDROID_FDO
-    TARGET_FDO_LIB := $(target_libgcov)
-  endif
-endif
-
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
 KERNEL_HEADERS_ARCH   := $(libc_root)/kernel/uapi/asm-x86 # x86 covers both x86 and x86_64.
 KERNEL_HEADERS := $(KERNEL_HEADERS_COMMON) $(KERNEL_HEADERS_ARCH)
@@ -110,11 +80,9 @@
 			-Werror=format-security \
 			-D_FORTIFY_SOURCE=2 \
 			-Wstrict-aliasing=2 \
-			-fPIC -fPIE \
 			-ffunction-sections \
 			-finline-functions \
 			-finline-limit=300 \
-			-fno-inline-functions-called-once \
 			-fno-short-enums \
 			-fstrict-aliasing \
 			-funswitch-loops \
@@ -186,7 +154,7 @@
 $(hide) $(PRIVATE_CXX) \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	 -nostdlib -Wl,-soname,$(notdir $@) \
-	 -shared \
+	$(if $(filter true,$(PRIVATE_CLANG)),-shared,-Wl,-shared) \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
 	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTBEGIN_SO_O)) \
 	$(PRIVATE_ALL_OBJECTS) \
@@ -197,7 +165,6 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_LDFLAGS) \
@@ -211,7 +178,7 @@
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	-nostdlib -Bdynamic \
 	-Wl,-z,nocopyreloc \
-	-fPIE -pie \
+	-pie \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
 	-Wl,-rpath-link=$(PRIVATE_TARGET_OUT_INTERMEDIATE_LIBRARIES) \
 	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTBEGIN_DYNAMIC_O)) \
@@ -223,7 +190,6 @@
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
 	$(if $(TARGET_BUILD_APPS),$(PRIVATE_TARGET_LIBGCC)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_LDFLAGS) \
@@ -247,7 +213,6 @@
 	-Wl,--no-whole-archive \
 	-Wl,--start-group \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
-	$(PRIVATE_TARGET_FDO_LIB) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
 	-Wl,--end-group \
diff --git a/core/combo/arch/mips/mips32-fp.mk b/core/combo/arch/mips/mips32-fp.mk
index e60bdac..8320e93 100644
--- a/core/combo/arch/mips/mips32-fp.mk
+++ b/core/combo/arch/mips/mips32-fp.mk
@@ -4,11 +4,9 @@
 ARCH_MIPS_HAS_FPU	:=true
 ARCH_HAVE_ALIGNED_DOUBLES :=true
 arch_variant_cflags := \
-    -EL \
-    -march=mips32 \
-    -mtune=mips32 \
     -mips32 \
-    -mhard-float
+    -mfp32 \
+    -modd-spreg \
 
 arch_variant_ldflags := \
-    -EL
+    -Wl,-melf32ltsmip
diff --git a/core/combo/arch/mips/mips32.mk b/core/combo/arch/mips/mips32.mk
deleted file mode 100644
index 35ef0d8..0000000
--- a/core/combo/arch/mips/mips32.mk
+++ /dev/null
@@ -1,12 +0,0 @@
-# Configuration for Android on MIPS.
-# Generating binaries for MIPS32/soft-float/little-endian
-
-arch_variant_cflags := \
-    -EL \
-    -march=mips32 \
-    -mtune=mips32 \
-    -mips32 \
-    -msoft-float
-
-arch_variant_ldflags := \
-    -EL
diff --git a/core/combo/arch/mips/mips32r2-fp-xburst.mk b/core/combo/arch/mips/mips32r2-fp-xburst.mk
index 8b0fef1..2b4f714 100644
--- a/core/combo/arch/mips/mips32r2-fp-xburst.mk
+++ b/core/combo/arch/mips/mips32r2-fp-xburst.mk
@@ -5,13 +5,11 @@
 ARCH_MIPS_HAS_FPU :=true
 ARCH_HAVE_ALIGNED_DOUBLES :=true
 arch_variant_cflags := \
-    -EL \
-    -march=mips32r2 \
-    -mtune=mips32r2 \
     -mips32r2 \
-    -mhard-float \
+    -mfp32 \
+    -modd-spreg \
     -mno-fused-madd \
     -Wa,-mmxu
 
 arch_variant_ldflags := \
-    -EL
+    -Wl,-melf32ltsmip
diff --git a/core/combo/arch/mips/mips32r2-fp.mk b/core/combo/arch/mips/mips32r2-fp.mk
index 08d91df..9acb018 100644
--- a/core/combo/arch/mips/mips32r2-fp.mk
+++ b/core/combo/arch/mips/mips32r2-fp.mk
@@ -4,12 +4,10 @@
 ARCH_MIPS_HAS_FPU	:=true
 ARCH_HAVE_ALIGNED_DOUBLES :=true
 arch_variant_cflags := \
-    -EL \
-    -march=mips32r2 \
-    -mtune=mips32r2 \
     -mips32r2 \
-    -mhard-float \
+    -mfp32 \
+    -modd-spreg \
     -msynci
 
 arch_variant_ldflags := \
-    -EL
+    -Wl,-melf32ltsmip
diff --git a/core/combo/arch/mips/mips32r2.mk b/core/combo/arch/mips/mips32r2.mk
deleted file mode 100644
index 16ce76f..0000000
--- a/core/combo/arch/mips/mips32r2.mk
+++ /dev/null
@@ -1,13 +0,0 @@
-# Configuration for Android on MIPS.
-# Generating binaries for MIPS32R2/soft-float/little-endian
-
-arch_variant_cflags := \
-    -EL \
-    -march=mips32r2 \
-    -mtune=mips32r2 \
-    -mips32r2 \
-    -msoft-float \
-    -msynci
-
-arch_variant_ldflags := \
-    -EL
diff --git a/core/combo/arch/mips/mips32r2dsp-fp.mk b/core/combo/arch/mips/mips32r2dsp-fp.mk
index fe2b1fe..c4b49b6 100644
--- a/core/combo/arch/mips/mips32r2dsp-fp.mk
+++ b/core/combo/arch/mips/mips32r2dsp-fp.mk
@@ -6,13 +6,11 @@
 ARCH_MIPS_HAS_FPU       :=true
 ARCH_HAVE_ALIGNED_DOUBLES :=true
 arch_variant_cflags := \
-    -EL \
-    -march=mips32r2 \
-    -mtune=mips32r2 \
     -mips32r2 \
-    -mhard-float \
+    -mfp32 \
+    -modd-spreg \
     -mdsp \
     -msynci
 
 arch_variant_ldflags := \
-    -EL
+    -Wl,-melf32ltsmip
diff --git a/core/combo/arch/mips/mips32r2dsp.mk b/core/combo/arch/mips/mips32r2dsp.mk
deleted file mode 100644
index 8a8976c..0000000
--- a/core/combo/arch/mips/mips32r2dsp.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-# Configuration for Android on MIPS.
-# Generating binaries for MIPS32R2/soft-float/little-endian/dsp
-
-ARCH_MIPS_HAS_DSP  	:=true
-ARCH_MIPS_DSP_REV	:=1
-
-arch_variant_cflags := \
-    -EL \
-    -march=mips32r2 \
-    -mtune=mips32r2 \
-    -mips32r2 \
-    -msoft-float \
-    -mdsp \
-    -msynci
-
-arch_variant_ldflags := \
-    -EL
diff --git a/core/combo/arch/mips/mips32r2dspr2-fp.mk b/core/combo/arch/mips/mips32r2dspr2-fp.mk
index 7e882b3..8b05ffc 100644
--- a/core/combo/arch/mips/mips32r2dspr2-fp.mk
+++ b/core/combo/arch/mips/mips32r2dspr2-fp.mk
@@ -6,13 +6,11 @@
 ARCH_MIPS_HAS_FPU       :=true
 ARCH_HAVE_ALIGNED_DOUBLES :=true
 arch_variant_cflags := \
-    -EL \
-    -march=mips32r2 \
-    -mtune=mips32r2 \
     -mips32r2 \
-    -mhard-float \
+    -mfp32 \
+    -modd-spreg \
     -mdspr2 \
     -msynci
 
 arch_variant_ldflags := \
-    -EL
+    -Wl,-melf32ltsmip
diff --git a/core/combo/arch/mips/mips32r2dspr2.mk b/core/combo/arch/mips/mips32r2dspr2.mk
deleted file mode 100644
index c311523..0000000
--- a/core/combo/arch/mips/mips32r2dspr2.mk
+++ /dev/null
@@ -1,17 +0,0 @@
-# Configuration for Android on MIPS.
-# Generating binaries for MIPS32R2/soft-float/little-endian/dsp
-
-ARCH_MIPS_HAS_DSP  	:=true
-ARCH_MIPS_DSP_REV	:=2
-
-arch_variant_cflags := \
-    -EL \
-    -march=mips32r2 \
-    -mtune=mips32r2 \
-    -mips32r2 \
-    -msoft-float \
-    -mdspr2 \
-    -msynci
-
-arch_variant_ldflags := \
-    -EL
diff --git a/core/combo/arch/mips/mips32r6.mk b/core/combo/arch/mips/mips32r6.mk
new file mode 100644
index 0000000..315aa60
--- /dev/null
+++ b/core/combo/arch/mips/mips32r6.mk
@@ -0,0 +1,12 @@
+# Configuration for Android on MIPS.
+# Generating binaries for MIPS32R6/hard-float/little-endian
+
+ARCH_MIPS_REV6 := true
+arch_variant_cflags := \
+    -mips32r6 \
+    -mfp64 \
+    -mno-odd-spreg \
+    -msynci
+
+arch_variant_ldflags := \
+    -Wl,-melf32ltsmip
diff --git a/core/combo/arch/mips64/mips64r2.mk b/core/combo/arch/mips64/mips64r2.mk
index 298aeaf..c5710d0 100644
--- a/core/combo/arch/mips64/mips64r2.mk
+++ b/core/combo/arch/mips64/mips64r2.mk
@@ -1,14 +1,10 @@
 # Configuration for Android on mips64r2.
 
+# This target is for temporary use only, until mips64r6 is supported by Android's qemu.
+
 ARCH_MIPS_HAS_FPU	:=true
 ARCH_HAVE_ALIGNED_DOUBLES :=true
 arch_variant_cflags := \
-    -EL \
-    -march=mips64r2 \
-    -mtune=mips64r2 \
     -mips64r2 \
-    -mhard-float \
     -msynci
 
-arch_variant_ldflags := \
-    -EL
diff --git a/core/combo/arch/mips64/mips64r6.mk b/core/combo/arch/mips64/mips64r6.mk
new file mode 100644
index 0000000..443de20
--- /dev/null
+++ b/core/combo/arch/mips64/mips64r6.mk
@@ -0,0 +1,7 @@
+# Configuration for Android on mips64r6.
+
+ARCH_MIPS_REV6 := true
+arch_variant_cflags := \
+    -mips64r6 \
+    -msynci
+
diff --git a/core/combo/fdo.mk b/core/combo/fdo.mk
new file mode 100644
index 0000000..9e331b6
--- /dev/null
+++ b/core/combo/fdo.mk
@@ -0,0 +1,39 @@
+#
+# Copyright (C) 2006 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Setup FDO related flags.
+
+$(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS:=
+
+ifeq ($(strip $(BUILD_FDO_INSTRUMENT)), true)
+  # Set BUILD_FDO_INSTRUMENT=true to turn on FDO instrumentation.
+  # The profile will be generated in /sdcard/fdo_profile on the device.
+  $(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS := -fprofile-generate=/sdcard/fdo_profile -DANDROID_FDO
+  $(combo_2nd_arch_prefix)TARGET_FDO_LDFLAGS := -lgcov -lgcc
+else
+  ifeq ($(strip $(BUILD_FDO_OPTIMIZE)), true)
+    # Set TARGET_FDO_PROFILE_PATH to use a custom profile directory for your build.
+    ifeq ($(strip $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH)),)
+      $(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH := vendor/google_data/fdo_profile
+    endif
+
+    ifneq ($(strip $(wildcard $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH)/$(PRODUCT_OUT))),)
+      $(combo_2nd_arch_prefix)TARGET_FDO_CFLAGS := -fprofile-use=$($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH) -DANDROID_FDO -fprofile-correction -Wcoverage-mismatch -Wno-error
+    else
+      $(warning Profile directory $($(combo_2nd_arch_prefix)TARGET_FDO_PROFILE_PATH)/$(PRODUCT_OUT) does not exist. Turning off FDO.)
+    endif
+  endif
+endif
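
The new fdo.mk is shared by every TARGET_linux-*.mk above. A minimal usage sketch (illustrative only; the command lines below are assumptions and not part of this change): an instrumented build might be kicked off with

    $ make BUILD_FDO_INSTRUMENT=true

which sets TARGET_FDO_CFLAGS to -fprofile-generate=/sdcard/fdo_profile -DANDROID_FDO and TARGET_FDO_LDFLAGS to -lgcov -lgcc, while a profile-guided build, once the collected profiles have been copied back into the tree, might use

    $ make BUILD_FDO_OPTIMIZE=true TARGET_FDO_PROFILE_PATH=vendor/google_data/fdo_profile

If neither flag is set, TARGET_FDO_CFLAGS stays empty and ordinary builds are unaffected.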
diff --git a/core/combo/include/arch/darwin-x86/AndroidConfig.h b/core/combo/include/arch/darwin-x86/AndroidConfig.h
index 44de4cd..54f3750 100644
--- a/core/combo/include/arch/darwin-x86/AndroidConfig.h
+++ b/core/combo/include/arch/darwin-x86/AndroidConfig.h
@@ -56,13 +56,6 @@
 #define HAVE_FORKEXEC
 
 /*
- * Process out-of-memory adjustment.  Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-/* #define HAVE_OOM_ADJ */
-
-/*
  * IPC model.  Choose one:
  *
  * HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/combo/include/arch/linux-arm/AndroidConfig.h b/core/combo/include/arch/linux-arm/AndroidConfig.h
index 0eb6c72..c06c8bc 100644
--- a/core/combo/include/arch/linux-arm/AndroidConfig.h
+++ b/core/combo/include/arch/linux-arm/AndroidConfig.h
@@ -63,13 +63,6 @@
 #define HAVE_FORKEXEC
 
 /*
- * Process out-of-memory adjustment.  Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-#define HAVE_OOM_ADJ
-
-/*
  * IPC model.  Choose one:
  *
  * HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/combo/include/arch/linux-mips/AndroidConfig.h b/core/combo/include/arch/linux-mips/AndroidConfig.h
index 076d711..bb3dc95 100644
--- a/core/combo/include/arch/linux-mips/AndroidConfig.h
+++ b/core/combo/include/arch/linux-mips/AndroidConfig.h
@@ -63,13 +63,6 @@
 #define HAVE_FORKEXEC
 
 /*
- * Process out-of-memory adjustment.  Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-#define HAVE_OOM_ADJ
-
-/*
  * IPC model.  Choose one:
  *
  * HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/combo/include/arch/linux-x86/AndroidConfig.h b/core/combo/include/arch/linux-x86/AndroidConfig.h
index ebb95b0..5523e49 100644
--- a/core/combo/include/arch/linux-x86/AndroidConfig.h
+++ b/core/combo/include/arch/linux-x86/AndroidConfig.h
@@ -56,13 +56,6 @@
 #define HAVE_FORKEXEC
 
 /*
- * Process out-of-memory adjustment.  Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-#define HAVE_OOM_ADJ
-
-/*
  * IPC model.  Choose one:
  *
  * HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/combo/include/arch/target_linux-x86/AndroidConfig.h b/core/combo/include/arch/target_linux-x86/AndroidConfig.h
index 5b56b51..c267b2b 100644
--- a/core/combo/include/arch/target_linux-x86/AndroidConfig.h
+++ b/core/combo/include/arch/target_linux-x86/AndroidConfig.h
@@ -49,13 +49,6 @@
 #define HAVE_FORKEXEC
 
 /*
- * Process out-of-memory adjustment.  Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-#define HAVE_OOM_ADJ
-
-/*
  * IPC model.  Choose one:
  *
  * HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/combo/include/arch/windows/AndroidConfig.h b/core/combo/include/arch/windows/AndroidConfig.h
index 0a52674..204740d 100644
--- a/core/combo/include/arch/windows/AndroidConfig.h
+++ b/core/combo/include/arch/windows/AndroidConfig.h
@@ -83,13 +83,6 @@
 #endif
 
 /*
- * Process out-of-memory adjustment.  Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-/* #define HAVE_OOM_ADJ */
-
-/*
  * IPC model.  Choose one:
  *
  * HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/combo/select.mk b/core/combo/select.mk
index e18cb1b..d495c6f 100644
--- a/core/combo/select.mk
+++ b/core/combo/select.mk
@@ -82,6 +82,10 @@
   # on a workstation.
   export CCACHE_BASEDIR := /
 
+  # Workaround for ccache with clang.
+  # See http://petereisentraut.blogspot.com/2011/09/ccache-and-clang-part-2.html
+  export CCACHE_CPP2 := true
+
   CCACHE_HOST_TAG := $(HOST_PREBUILT_TAG)
   # If we are cross-compiling Windows binaries on Linux
   # then use the linux ccache binary instead.
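
As background on the workaround (summarizing the linked post; behaviour as generally documented for ccache rather than verified here): with CCACHE_CPP2 set, ccache still preprocesses the source to compute the cache key, but on a miss it hands the original file to the real compiler instead of the preprocessed copy, which avoids the spurious warnings clang emits on preprocessed output. For example, with an assumed file foo.c:

    $ CCACHE_CPP2=true ccache clang -c foo.c    # clang compiles foo.c itself, not the preprocessed copy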
diff --git a/core/config.mk b/core/config.mk
index ad78ffa..bfa5610 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -18,6 +18,14 @@
 empty :=
 space := $(empty) $(empty)
 comma := ,
+# Note that make will eat the newline just before endef.
+define newline
+
+
+endef
+# Unfortunately you can't simply define backslash as \ or \\.
+backslash := \a
+backslash := $(patsubst %a,%,$(backslash))
 
 # Tell python not to spam the source tree with .pyc files.  This
 # only has an effect on python 2.6 and above.
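
A quick illustration of the two helpers (assumed usage, not taken from this change): make has no literal escape for a newline or a lone backslash inside a function call, so they are captured in variables first and expanded where needed:

    $(info first$(newline)second)    # prints "first" and "second" on separate lines
    $(info a$(backslash)b)           # prints: a\b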
@@ -37,13 +45,13 @@
 	$(TOPDIR)frameworks/native/include \
 	$(TOPDIR)frameworks/native/opengl/include \
 	$(TOPDIR)frameworks/av/include \
-	$(TOPDIR)frameworks/base/include \
-	$(TOPDIR)external/skia/include
+	$(TOPDIR)frameworks/base/include
 SRC_HOST_HEADERS:=$(TOPDIR)tools/include
 SRC_LIBRARIES:= $(TOPDIR)libs
 SRC_SERVERS:= $(TOPDIR)servers
 SRC_TARGET_DIR := $(TOPDIR)build/target
 SRC_API_DIR := $(TOPDIR)prebuilts/sdk/api
+SRC_SYSTEM_API_DIR := $(TOPDIR)prebuilts/sdk/system-api
 
 # Some specific paths to tools
 SRC_DROIDDOC_DIR := $(TOPDIR)build/tools/droiddoc
@@ -387,6 +395,8 @@
 MKEXT2IMG := $(HOST_OUT_EXECUTABLES)/genext2fs$(HOST_EXECUTABLE_SUFFIX)
 MAKE_EXT4FS := $(HOST_OUT_EXECUTABLES)/make_ext4fs$(HOST_EXECUTABLE_SUFFIX)
 MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg.sh
+MAKE_F2FS := $(HOST_OUT_EXECUTABLES)/make_f2fs$(HOST_EXECUTABLE_SUFFIX)
+MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh
 MKEXT2BOOTIMG := external/genext2fs/mkbootimg_ext2.sh
 SIMG2IMG := $(HOST_OUT_EXECUTABLES)/simg2img$(HOST_EXECUTABLE_SUFFIX)
 E2FSCK := $(HOST_OUT_EXECUTABLES)/e2fsck$(HOST_EXECUTABLE_SUFFIX)
@@ -399,6 +409,11 @@
 LLVM_RS_CC := $(HOST_OUT_EXECUTABLES)/llvm-rs-cc$(HOST_EXECUTABLE_SUFFIX)
 BCC_COMPAT := $(HOST_OUT_EXECUTABLES)/bcc_compat$(HOST_EXECUTABLE_SUFFIX)
 LINT := prebuilts/sdk/tools/lint
+RMTYPEDEFS := $(HOST_OUT_EXECUTABLES)/rmtypedefs
+APPEND2SIMG := $(HOST_OUT_EXECUTABLES)/append2simg
+VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
+BUILD_VERITY_TREE := $(HOST_OUT_EXECUTABLES)/build_verity_tree
+BOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/boot_signer
 
 # ACP is always for the build OS, not for the host OS
 ACP := $(BUILD_OUT_EXECUTABLES)/acp$(BUILD_EXECUTABLE_SUFFIX)
@@ -406,7 +421,8 @@
 # dx is java behind a shell script; no .exe necessary.
 DX := $(HOST_OUT_EXECUTABLES)/dx
 ZIPALIGN := $(HOST_OUT_EXECUTABLES)/zipalign$(HOST_EXECUTABLE_SUFFIX)
-FINDBUGS := prebuilt/common/findbugs/bin/findbugs
+FINDBUGS_DIR := external/owasp/sanitizer/tools/findbugs/bin
+FINDBUGS := $(FINDBUGS_DIR)/findbugs
 EMMA_JAR := external/emma/lib/emma$(COMMON_JAVA_PACKAGE_SUFFIX)
 
 # Tool to merge AndroidManifest.xmls
@@ -526,7 +542,7 @@
 
 # allow overriding default Java libraries on a per-target basis
 ifeq ($(TARGET_DEFAULT_JAVA_LIBRARIES),)
-  TARGET_DEFAULT_JAVA_LIBRARIES := core-libart core-junit ext framework framework2
+  TARGET_DEFAULT_JAVA_LIBRARIES := core-libart core-junit ext framework
 endif
 
 TARGET_CPU_SMP ?= true
@@ -579,7 +595,13 @@
     $(patsubst $(HISTORICAL_SDK_VERSIONS_ROOT)/%/android.jar,%, \
     $(wildcard $(HISTORICAL_SDK_VERSIONS_ROOT)/*/android.jar)))
 
+# We don't have a prebuilt system_current SDK yet.
+TARGET_AVAILABLE_SDK_VERSIONS := $(TARGET_AVAILABLE_SDK_VERSIONS)
+
 INTERNAL_PLATFORM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/public_api.txt
+INTERNAL_PLATFORM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/removed.txt
+INTERNAL_PLATFORM_SYSTEM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-api.txt
+INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-removed.txt
 
 # This is the standard way to name a directory containing prebuilt target
 # objects. E.g., prebuilt/$(TARGET_PREBUILT_TAG)/libc.so
@@ -590,7 +612,7 @@
 
 # Set up RS prebuilt variables for compatibility library
 
-RS_PREBUILT_CLCORE := prebuilts/sdk/renderscript/lib/$(TARGET_ARCH)/libclcore.bc
+RS_PREBUILT_CLCORE := prebuilts/sdk/renderscript/lib/$(TARGET_ARCH)/librsrt_$(TARGET_ARCH).bc
 RS_PREBUILT_LIBPATH := -L prebuilts/ndk/8/platforms/android-9/arch-$(TARGET_ARCH)/usr/lib
 RS_PREBUILT_COMPILER_RT := prebuilts/sdk/renderscript/lib/$(TARGET_ARCH)/libcompiler_rt.a
 
diff --git a/core/definitions.mk b/core/definitions.mk
index 441c186..38aa720 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -879,12 +879,12 @@
 @echo "Renderscript compatibility: $(notdir $@) <= $(notdir $<)"
 $(hide) mkdir -p $(dir $@)
 $(hide) $(BCC_COMPAT) -O3 -o $(dir $@)/$(notdir $(<:.bc=.o)) -fPIC -shared \
-	-rt-path $(RS_PREBUILT_CLCORE) -mtriple $(RS_TRIPLE) $<
+	-rt-path $(RS_PREBUILT_CLCORE) -mtriple $(RS_COMPAT_TRIPLE) $<
 $(hide) $(PRIVATE_CXX) -shared -Wl,-soname,$(notdir $@) -nostdlib \
 	-Wl,-rpath,\$$ORIGIN/../lib \
 	$(dir $@)/$(notdir $(<:.bc=.o)) \
 	$(RS_PREBUILT_COMPILER_RT) \
-	-o $@ -L prebuilts/gcc/ \
+	-o $@ $(TARGET_GLOBAL_LDFLAGS) -L prebuilts/gcc/ \
 	-L $(TARGET_OUT_INTERMEDIATE_LIBRARIES) $(RS_PREBUILT_LIBPATH) \
 	-lRSSupport -lm -lc
 endef
@@ -1309,7 +1309,7 @@
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	-Wl,-rpath-link=$(PRIVATE_TARGET_OUT_INTERMEDIATE_LIBRARIES) \
 	-Wl,-rpath,\$$ORIGIN/../lib \
-	-shared -Wl,-soname,$(notdir $@) \
+	-Wl,-shared -Wl,-soname,$(notdir $@) \
 	$(PRIVATE_LDFLAGS) \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
 	$(PRIVATE_ALL_OBJECTS) \
@@ -1413,7 +1413,11 @@
 ifdef BUILD_HOST_static
 HOST_FPIE_FLAGS :=
 else
-HOST_FPIE_FLAGS := -fPIE -pie
+HOST_FPIE_FLAGS := -pie
+# Force the correct entry point to work around a bug in binutils that manifests with -pie
+ifeq ($(HOST_OS),windows)
+HOST_FPIE_FLAGS += -Wl,-e_mainCRTStartup
+endif
 endif
 
 ifneq ($(HOST_CUSTOM_LD_COMMAND),true)
@@ -1568,7 +1572,7 @@
 $(call unzip-jar-files,$(PRIVATE_STATIC_JAVA_LIBRARIES),$(PRIVATE_CLASS_INTERMEDIATES_DIR))
 $(call dump-words-to-file,$(PRIVATE_JAVA_SOURCES),$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list)
 $(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
-	    find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list; \
+          find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list; \
 fi
 $(hide) tr ' ' '\n' < $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list \
     | sort -u > $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq
@@ -1593,12 +1597,21 @@
     -name $(word 1, $(PRIVATE_JAR_EXCLUDE_FILES)) \
     $(addprefix -o -name , $(wordlist 2, 999, $(PRIVATE_JAR_EXCLUDE_FILES))) \
     | xargs rm -rf)
-$(if $(PRIVATE_JAR_PACKAGES), $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -mindepth 1 -type d \
-    $(foreach pkg, $(PRIVATE_JAR_PACKAGES), \
-        -not -path $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))) \
-    | xargs rm -rf)
-$(hide) jar $(if $(strip $(PRIVATE_JAR_MANIFEST)),-cfm,-cf) \
-    $@ $(PRIVATE_JAR_MANIFEST) -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .
+$(if $(PRIVATE_JAR_PACKAGES), \
+    $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -mindepth 1 -type f \
+        $(foreach pkg, $(PRIVATE_JAR_PACKAGES), \
+            -not -path $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))/\*) -delete ; \
+        find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -empty -delete)
+$(if $(PRIVATE_JAR_EXCLUDE_PACKAGES), $(hide) rm -rf \
+    $(foreach pkg, $(PRIVATE_JAR_EXCLUDE_PACKAGES), \
+        $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))))
+$(if $(PRIVATE_RMTYPEDEFS), $(hide) $(RMTYPEDEFS) -v $(PRIVATE_CLASS_INTERMEDIATES_DIR))
+$(if $(PRIVATE_JAR_MANIFEST), \
+    $(hide) sed -e 's/%BUILD_NUMBER%/$(BUILD_NUMBER)/' \
+            $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf && \
+        jar -cfm $@ $(dir $@)/manifest.mf \
+            -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) ., \
+    $(hide) jar -cf $@ -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .)
 endef
 
 define transform-java-to-classes.jar
@@ -1644,12 +1657,21 @@
     -name $(word 1, $(PRIVATE_JAR_EXCLUDE_FILES)) \
     $(addprefix -o -name , $(wordlist 2, 999, $(PRIVATE_JAR_EXCLUDE_FILES))) \
     | xargs rm -rf)
-$(if $(PRIVATE_JAR_PACKAGES), $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -mindepth 1 -type d \
-    $(foreach pkg, $(PRIVATE_JAR_PACKAGES), \
-        -not -path $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))) \
-    | xargs rm -rf)
-$(hide) jar $(if $(strip $(PRIVATE_JAR_MANIFEST)),-cfm,-cf) \
-    $@ $(PRIVATE_JAR_MANIFEST) -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .
+$(if $(PRIVATE_JAR_PACKAGES), \
+    $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -mindepth 1 -type f \
+        $(foreach pkg, $(PRIVATE_JAR_PACKAGES), \
+            -not -path $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))/\*) -delete ; \
+        find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -empty -delete)
+$(if $(PRIVATE_JAR_EXCLUDE_PACKAGES), $(hide) rm -rf \
+    $(foreach pkg, $(PRIVATE_JAR_EXCLUDE_PACKAGES), \
+        $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))))
+$(if $(PRIVATE_RMTYPEDEFS), $(hide) $(RMTYPEDEFS) -v $(PRIVATE_CLASS_INTERMEDIATES_DIR))
+$(if $(PRIVATE_JAR_MANIFEST), \
+    $(hide) sed -e 's/%BUILD_NUMBER%/$(BUILD_NUMBER)/' \
+            $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf && \
+        jar -cfm $@ $(dir $@)/manifest.mf \
+            -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) ., \
+    $(hide) jar -cf $@ -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .)
 $(hide) mv $(PRIVATE_CLASS_INTERMEDIATES_DIR)/newstamp $(PRIVATE_CLASS_INTERMEDIATES_DIR)/stamp
 endef
 
@@ -1671,9 +1693,10 @@
 define transform-classes.jar-to-dex
 @echo "target Dex: $(PRIVATE_MODULE)"
 @mkdir -p $(dir $@)
+$(hide) rm -f $(dir $@)classes*.dex
 $(hide) $(DX) \
     $(if $(findstring windows,$(HOST_OS)),,-JXms16M -JXmx2048M) \
-    --dex --output=$@ \
+    --dex --output=$(dir $@) \
     $(incremental_dex) \
     $(if $(NO_OPTIMIZE_DX), \
         --no-optimize) \
@@ -1708,7 +1731,7 @@
 define add-assets-to-package
 $(hide) $(AAPT) package -u $(PRIVATE_AAPT_FLAGS) \
     $(addprefix -c , $(PRIVATE_PRODUCT_AAPT_CONFIG)) \
-    $(addprefix --preferred-configurations , $(PRIVATE_PRODUCT_AAPT_PREF_CONFIG)) \
+    $(addprefix --preferred-density , $(PRIVATE_PRODUCT_AAPT_PREF_CONFIG)) \
     $(addprefix -M , $(PRIVATE_ANDROID_MANIFEST)) \
     $(addprefix -S , $(PRIVATE_RESOURCE_DIR)) \
     $(addprefix -A , $(PRIVATE_ASSET_DIR)) \
@@ -1743,11 +1766,7 @@
 
 #TODO: update the manifest to point to the dex file
 define add-dex-to-package
-$(if $(filter classes.dex,$(notdir $(PRIVATE_DEX_FILE))),\
-$(hide) zip -qj $@ $(PRIVATE_DEX_FILE),\
-$(hide) _adtp_classes_dex=$(dir $(PRIVATE_DEX_FILE))classes.dex; \
-cp $(PRIVATE_DEX_FILE) $$_adtp_classes_dex && \
-zip -qj $@ $$_adtp_classes_dex && rm -f $$_adtp_classes_dex)
+$(hide) zip -qj $@ $(dir $(PRIVATE_DEX_FILE))classes*.dex
 endef
 
 # Add java resources added by the current module.
@@ -1996,8 +2015,8 @@
 endef
 
 # $(1): The file(s) to check (often $@)
-# $(2): The maximum total image size, in decimal bytes
-# $(3): the type of filesystem "yaffs" or "raw"
+# $(2): The maximum total image size, in decimal bytes.
+#    Make sure to take into account any reserved space needed for the FS.
 #
 # If $(2) is empty, evaluates to "true"
 #
@@ -2010,15 +2029,9 @@
   total=$$(( $$( echo "$$size" ) )); \
   printname=$$(echo -n "$(1)" | tr " " +); \
   img_blocksize=$(call image-size-from-data-size,$(BOARD_FLASH_BLOCK_SIZE)); \
-  if [ "$(3)" == "yaffs" ]; then \
-    reservedblocks=8; \
-  else \
-    reservedblocks=0; \
-  fi; \
   twoblocks=$$((img_blocksize * 2)); \
   onepct=$$((((($(2) / 100) - 1) / img_blocksize + 1) * img_blocksize)); \
-  reserve=$$(((twoblocks > onepct ? twoblocks : onepct) + \
-               reservedblocks * img_blocksize)); \
+  reserve=$$((twoblocks > onepct ? twoblocks : onepct)); \
   maxsize=$$(($(2) - reserve)); \
   echo "$$printname maxsize=$$maxsize blocksize=$$img_blocksize total=$$total reserve=$$reserve"; \
   if [ "$$total" -gt "$$maxsize" ]; then \
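
A worked example of the new reserve computation (all numbers assumed for illustration): with img_blocksize resolving to 4096 and a partition size $(2) of 1073741824 bytes,

    twoblocks = 2 * 4096 = 8192
    onepct    = ((1073741824/100 - 1)/4096 + 1) * 4096 = 10739712
    reserve   = max(twoblocks, onepct) = 10739712
    maxsize   = 1073741824 - 10739712 = 1063002112

so roughly 1% of the partition, rounded up to a whole block, is held back, and the extra yaffs reservedblocks padding is gone.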
@@ -2040,8 +2053,7 @@
 # $(2): The partition size.
 define assert-max-image-size
 $(if $(2), \
-  $(call assert-max-file-size,$(1),$(call image-size-from-data-size,$(2))), \
-  true)
+  $(call assert-max-file-size,$(1),$(call image-size-from-data-size,$(2))))
 endef
 
 
@@ -2140,17 +2152,19 @@
 #    $(1)  target
 #    $(2)  stable api file
 #    $(3)  api file to be tested
-#    $(4)  arguments for apicheck
-#    $(5)  command to run if apicheck failed
-#    $(6)  target dependent on this api check
-#    $(7)  additional dependencies
+#    $(4)  stable removed api file
+#    $(5)  removed api file to be tested
+#    $(6)  arguments for apicheck
+#    $(7)  command to run if apicheck failed
+#    $(8)  target dependent on this api check
+#    $(9)  additional dependencies
 define check-api
-$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(strip $(1))-timestamp: $(2) $(3) $(APICHECK) $(7)
+$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(strip $(1))-timestamp: $(2) $(3) $(4) $(APICHECK) $(9)
 	@echo "Checking API:" $(1)
-	$(hide) ( $(APICHECK_COMMAND) $(4) $(2) $(3) || ( $(5) ; exit 38 ) )
+	$(hide) ( $(APICHECK_COMMAND) $(6) $(2) $(3) $(4) $(5) || ( $(7) ; exit 38 ) )
 	$(hide) mkdir -p $$(dir $$@)
 	$(hide) touch $$@
-$(6): $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(strip $(1))-timestamp
+$(8): $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(strip $(1))-timestamp
 endef
 
 ## Whether to build from source if prebuilt alternative exists
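
A hypothetical invocation of the widened macro (target name, file paths and argument variables here are placeholders, not taken from this change), showing that the stable and candidate removed-API files now travel as arguments 4 and 5 while the remaining arguments shift down by two:

    $(eval $(call check-api, \
        checkpublicapi-sample, \
        $(SRC_API_DIR)/current.txt, $(INTERNAL_PLATFORM_API_FILE), \
        $(SRC_API_DIR)/removed.txt, $(INTERNAL_PLATFORM_REMOVED_API_FILE), \
        $(sample_apicheck_args), \
        cat $(sample_error_message), \
        checkapi, \
        $(sample_extra_deps)))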
@@ -2177,11 +2191,13 @@
 $(if $(call if-build-from-source,$(2),$(3)),$(eval include $(1)))
 endef
 
-## Return the arch for the source file of a prebuilt
+# Return the arch for the source file of a prebuilt
+# Return "none" if no matching arch is found, so the result can be passed to
+# LOCAL_MODULE_TARGET_ARCH.
 # $(1) the list of archs supported by the prebuilt
 define get-prebuilt-src-arch
 $(strip $(if $(filter $(TARGET_ARCH),$(1)),$(TARGET_ARCH),\
-  $(if $(filter $(TARGET_2ND_ARCH),$(1)),$(TARGET_2ND_ARCH))))
+  $(if $(filter $(TARGET_2ND_ARCH),$(1)),$(TARGET_2ND_ARCH),none)))
 endef
 
 ###########################################################
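
For illustration (arch values assumed): on a target with TARGET_ARCH := arm64 and TARGET_2ND_ARCH := arm,

    $(call get-prebuilt-src-arch,arm x86)    # -> arm
    $(call get-prebuilt-src-arch,mips)       # -> none

so a prebuilt with no matching arch now yields "none", which LOCAL_MODULE_TARGET_ARCH will never match, rather than an empty value that imposes no restriction.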
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 76fc130..8a19b66 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -10,6 +10,8 @@
 DEXPREOPT_BOOT_JARS_MODULES := $(PRODUCT_BOOT_JARS)
 PRODUCT_BOOTCLASSPATH := $(subst $(space),:,$(foreach m,$(DEXPREOPT_BOOT_JARS_MODULES),/system/framework/$(m).jar))
 
+PRODUCT_SYSTEM_SERVER_CLASSPATH := $(subst $(space),:,$(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),/system/framework/$(m).jar))
+
 DEXPREOPT_BUILD_DIR := $(OUT_DIR)
 DEXPREOPT_PRODUCT_DIR_FULL_PATH := $(PRODUCT_OUT)/dex_bootjars
 DEXPREOPT_PRODUCT_DIR := $(patsubst $(DEXPREOPT_BUILD_DIR)/%,%,$(DEXPREOPT_PRODUCT_DIR_FULL_PATH))
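
As an illustration (module names assumed), a product defining

    PRODUCT_SYSTEM_SERVER_JARS := services ethernet-service

ends up with PRODUCT_SYSTEM_SERVER_CLASSPATH set to /system/framework/services.jar:/system/framework/ethernet-service.jar, mirroring how PRODUCT_BOOTCLASSPATH is assembled from PRODUCT_BOOT_JARS above.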
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 8fc0c67..5af2be2 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -20,12 +20,7 @@
 
 # start of image reserved address space
 LIBART_IMG_HOST_BASE_ADDRESS   := 0x60000000
-
-ifeq ($(TARGET_ARCH),mips)
-LIBART_IMG_TARGET_BASE_ADDRESS := 0x30000000
-else
 LIBART_IMG_TARGET_BASE_ADDRESS := 0x70000000
-endif
 
 define get-product-default-property
 $(strip $(patsubst $(1)=%,%,$(filter $(1)=%,$(PRODUCT_DEFAULT_PROPERTY_OVERRIDES))))
@@ -36,6 +31,17 @@
 DEX2OAT_XMS := $(call get-product-default-property,dalvik.vm.dex2oat-Xms)
 DEX2OAT_XMX := $(call get-product-default-property,dalvik.vm.dex2oat-Xmx)
 
+ifeq ($(TARGET_ARCH),mips)
+# MIPS-specific overrides.
+# For MIPS the ART image is loaded at a lower address. This causes the image
+# to overlap with memory on the host when cross-compiling and building the
+# image, so we limit the Xmx value here. This isn't done via a property
+# because we want the larger Xmx value when we're running on an actual MIPS
+# device.
+LIBART_IMG_TARGET_BASE_ADDRESS := 0x30000000
+DEX2OAT_XMX := 128m
+endif
+
 ########################################################################
 # The full system boot classpath
 
@@ -84,5 +90,6 @@
 	--oat-file=$(2) \
 	--android-root=$(PRODUCT_OUT)/system \
 	--instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
-	--instruction-set-features=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
+	--instruction-set-features=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES) \
+	--include-patch-information --runtime-arg -Xnorelocate --no-include-debug-symbols
 endef
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
index 7eefc0b..fe4c5a4 100644
--- a/core/dex_preopt_libart_boot.mk
+++ b/core/dex_preopt_libart_boot.mk
@@ -53,4 +53,4 @@
 		--image=$@ --base=$(LIBART_IMG_TARGET_BASE_ADDRESS) \
 		--instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
 		--instruction-set-features=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES) \
-		--android-root=$(PRODUCT_OUT)/system
+		--android-root=$(PRODUCT_OUT)/system --include-patch-information --runtime-arg -Xnorelocate --no-include-debug-symbols
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 613b058..741f9a3 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -25,13 +25,19 @@
 ifdef LOCAL_UNINSTALLABLE_MODULE
 LOCAL_DEX_PREOPT :=
 endif
-ifeq (,$(strip $(all_java_sources)$(full_static_java_libs)$(my_prebuilt_src_file))) # contains no java code
+ifeq (,$(strip $(built_dex)$(my_prebuilt_src_file))) # contains no java code
 LOCAL_DEX_PREOPT :=
 endif
 # if module oat file requested in data, disable LOCAL_DEX_PREOPT, will default location to dalvik-cache
 ifneq (,$(filter $(LOCAL_MODULE),$(PRODUCT_DEX_PREOPT_PACKAGES_IN_DATA)))
 LOCAL_DEX_PREOPT :=
 endif
+# If WITH_DEXPREOPT_BOOT_IMG_ONLY=true, skip modules that are not in the boot class path.
+ifeq (true,$(WITH_DEXPREOPT_BOOT_IMG_ONLY))
+ifeq ($(filter $(DEXPREOPT_BOOT_JARS_MODULES),$(LOCAL_MODULE)),)
+LOCAL_DEX_PREOPT :=
+endif
+endif
 
 built_odex :=
 installed_odex :=
@@ -63,64 +69,29 @@
 # For a Java library, we build odex for both 1st arch and 2nd arch, if we have one.
 # #################################################
 # Odex for the 1st arch
-built_odex := $(call get-odex-file-path,$(DEX2OAT_TARGET_ARCH),$(LOCAL_BUILT_MODULE))
-ifdef LOCAL_DEX_PREOPT_IMAGE_LOCATION
-my_dex_preopt_image_location := $(LOCAL_DEX_PREOPT_IMAGE_LOCATION)
-else
-my_dex_preopt_image_location := $(DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION)
-endif
-my_dex_preopt_image_filename := $(call get-image-file-path,$(DEX2OAT_TARGET_ARCH),$(my_dex_preopt_image_location))
-$(built_odex): PRIVATE_2ND_ARCH_VAR_PREFIX :=
-$(built_odex): PRIVATE_DEX_LOCATION := $(patsubst $(PRODUCT_OUT)%,%,$(LOCAL_INSTALLED_MODULE))
-$(built_odex): PRIVATE_DEX_PREOPT_IMAGE_LOCATION := $(my_dex_preopt_image_location)
-$(built_odex) : $(DEXPREOPT_ONE_FILE_DEPENDENCY_BUILT_BOOT_PREOPT) \
-                $(DEXPREOPT_ONE_FILE_DEPENDENCY_TOOLS) \
-                $(my_dex_preopt_image_filename)
-installed_odex := $(call get-odex-file-path,$(DEX2OAT_TARGET_ARCH),$(LOCAL_INSTALLED_MODULE))
-built_installed_odex := $(built_odex):$(installed_odex)
+my_2nd_arch_prefix :=
+include $(BUILD_SYSTEM)/setup_one_odex.mk
 # #################################################
 # Odex for the 2nd arch
 ifdef TARGET_2ND_ARCH
-built_odex2 := $(call get-odex-file-path,$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH),$(LOCAL_BUILT_MODULE))
-ifdef LOCAL_DEX_PREOPT_IMAGE_LOCATION
-my_dex_preopt_image_location := $(LOCAL_DEX_PREOPT_IMAGE_LOCATION)
-else
-my_dex_preopt_image_location := $($(TARGET_2ND_ARCH_VAR_PREFIX)DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION)
-endif
-my_dex_preopt_image_filename := $(call get-image-file-path,$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH),$(my_dex_preopt_image_location))
-$(built_odex2): PRIVATE_2ND_ARCH_VAR_PREFIX := $(TARGET_2ND_ARCH_VAR_PREFIX)
-$(built_odex2): PRIVATE_DEX_LOCATION := $(patsubst $(PRODUCT_OUT)%,%,$(LOCAL_INSTALLED_MODULE))
-$(built_odex2): PRIVATE_DEX_PREOPT_IMAGE_LOCATION := $(my_dex_preopt_image_location)
-$(built_odex2) : $($(TARGET_2ND_ARCH_VAR_PREFIX)DEXPREOPT_ONE_FILE_DEPENDENCY_BUILT_BOOT_PREOPT) \
-                 $(DEXPREOPT_ONE_FILE_DEPENDENCY_TOOLS) \
-                 $(my_dex_preopt_image_filename)
-
-installed_odex2 := $(call get-odex-file-path,$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH),$(LOCAL_INSTALLED_MODULE))
-built_odex += $(built_odex2)
-installed_odex += $(installed_odex2)
-built_installed_odex += $(built_odex2):$(installed_odex2)
+my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
+include $(BUILD_SYSTEM)/setup_one_odex.mk
 endif  # TARGET_2ND_ARCH
 # #################################################
 else  # must be APPS
-# For an app, we build for the multilib arch it's targeted for.
-built_odex := $(call get-odex-file-path,$($(LOCAL_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH),$(LOCAL_BUILT_MODULE))
-ifdef LOCAL_DEX_PREOPT_IMAGE_LOCATION
-my_dex_preopt_image_location := $(LOCAL_DEX_PREOPT_IMAGE_LOCATION)
-else
-my_dex_preopt_image_location := $($(LOCAL_2ND_ARCH_VAR_PREFIX)DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION)
-endif
-my_dex_preopt_image_filename := $(call get-image-file-path,$($(LOCAL_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH),$(my_dex_preopt_image_location))
-$(built_odex): PRIVATE_2ND_ARCH_VAR_PREFIX := $(LOCAL_2ND_ARCH_VAR_PREFIX)
-$(built_odex): PRIVATE_DEX_LOCATION := $(patsubst $(PRODUCT_OUT)%,%,$(LOCAL_INSTALLED_MODULE))
-$(built_odex): PRIVATE_DEX_PREOPT_IMAGE_LOCATION := $(my_dex_preopt_image_location)
-$(built_odex) : $($(LOCAL_2ND_ARCH_VAR_PREFIX)DEXPREOPT_ONE_FILE_DEPENDENCY_BUILT_BOOT_PREOPT) \
-                $(DEXPREOPT_ONE_FILE_DEPENDENCY_TOOLS) \
-                $(my_dex_preopt_image_filename)
-installed_odex := $(call get-odex-file-path,$($(LOCAL_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH),$(LOCAL_INSTALLED_MODULE))
-built_installed_odex := $(built_odex):$(installed_odex)
+# The preferred arch
+my_2nd_arch_prefix := $(LOCAL_2ND_ARCH_VAR_PREFIX)
+include $(BUILD_SYSTEM)/setup_one_odex.mk
+ifdef TARGET_2ND_ARCH
+ifeq ($(LOCAL_MULTILIB),both)
+# The non-preferred arch
+my_2nd_arch_prefix := $(if $(LOCAL_2ND_ARCH_VAR_PREFIX),,$(TARGET_2ND_ARCH_VAR_PREFIX))
+include $(BUILD_SYSTEM)/setup_one_odex.mk
+endif  # LOCAL_MULTILIB is both
+endif  # TARGET_2ND_ARCH
 endif  # LOCAL_MODULE_CLASS
-endif # libart
-endif # boot jar
+endif  # libart
+endif  # boot jar
 
 ifdef built_odex
 # Use pattern rule - we may have multiple installed odex files.
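
[Editor note] The new WITH_DEXPREOPT_BOOT_IMG_ONLY switch skips per-module odex files for anything that is not on the boot class path. A minimal sketch of how a build might enable it (assumed usage; the variable is simply read from the environment or a product/board makefile):

    # e.g. in a product/board makefile, or passed on the make command line
    WITH_DEXPREOPT := true
    WITH_DEXPREOPT_BOOT_IMG_ONLY := true
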
diff --git a/core/droiddoc.mk b/core/droiddoc.mk
index ecfe3dc..d3e61d5 100644
--- a/core/droiddoc.mk
+++ b/core/droiddoc.mk
@@ -64,12 +64,15 @@
     # Use android_stubs_current if LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS.
     LOCAL_JAVA_LIBRARIES := android_stubs_current $(LOCAL_JAVA_LIBRARIES)
     $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_stubs_current)
+  else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
+    LOCAL_JAVA_LIBRARIES := android_system_stubs_current $(LOCAL_JAVA_LIBRARIES)
+    $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_system_stubs_current)
   else
     LOCAL_JAVA_LIBRARIES := sdk_v$(LOCAL_SDK_VERSION) $(LOCAL_JAVA_LIBRARIES)
     $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, sdk_v$(LOCAL_SDK_VERSION))
   endif
 else
-  LOCAL_JAVA_LIBRARIES := core-libart ext framework framework2 $(LOCAL_JAVA_LIBRARIES)
+  LOCAL_JAVA_LIBRARIES := core-libart ext framework $(LOCAL_JAVA_LIBRARIES)
   $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core-libart)
 endif  # LOCAL_SDK_VERSION
 LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
@@ -134,15 +137,13 @@
 $(full_target): PRIVATE_IN_CUSTOM_ASSET_DIR := $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR)/$(LOCAL_DROIDDOC_CUSTOM_ASSET_DIR)
 $(full_target): PRIVATE_OUT_ASSET_DIR := $(out_dir)/$(LOCAL_DROIDDOC_ASSET_DIR)
 $(full_target): PRIVATE_OUT_CUSTOM_ASSET_DIR := $(out_dir)/$(LOCAL_DROIDDOC_CUSTOM_ASSET_DIR)
+
+html_dir_files :=
 ifneq ($(strip $(LOCAL_DROIDDOC_HTML_DIR)),)
 $(full_target): PRIVATE_DROIDDOC_HTML_DIR := -htmldir $(LOCAL_PATH)/$(LOCAL_DROIDDOC_HTML_DIR)
+html_dir_files := $(shell find $(LOCAL_PATH)/$(LOCAL_DROIDDOC_HTML_DIR) -type f)
 else
-$(full_target): PRIVATE_DROIDDOC_HTML_DIR := 
-endif
-ifneq ($(strip $(LOCAL_ADDITIONAL_HTML_DIR)),)
-$(full_target): PRIVATE_ADDITIONAL_HTML_DIR := -htmldir2 $(LOCAL_PATH)/$(LOCAL_ADDITIONAL_HTML_DIR)
-else
-$(full_target): PRIVATE_ADDITIONAL_HTML_DIR :=
+$(full_target): PRIVATE_DROIDDOC_HTML_DIR :=
 endif
 ifneq ($(strip $(LOCAL_ADDITIONAL_HTML_DIR)),)
 $(full_target): PRIVATE_ADDITIONAL_HTML_DIR := -htmldir2 $(LOCAL_PATH)/$(LOCAL_ADDITIONAL_HTML_DIR)
@@ -153,8 +154,6 @@
 # TODO: not clear if this is used any more
 $(full_target): PRIVATE_LOCAL_PATH := $(LOCAL_PATH)
 
-html_dir_files := $(shell find $(LOCAL_PATH)/$(LOCAL_DROIDDOC_HTML_DIR) -type f)
-
 $(full_target): $(full_src_files) $(droiddoc_templates) $(droiddoc) $(html_dir_files) $(full_java_lib_deps) $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	@echo Docs droiddoc: $(PRIVATE_OUT_DIR)
 	$(hide) mkdir -p $(dir $@)
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index 396199c..3d6ad4a 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -110,6 +110,15 @@
 $(strip_output): PRIVATE_READELF := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_READELF)
 $(strip_output): $(strip_input) | $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP)
 	$(transform-to-stripped-keep-symbols)
+
+# A product may be configured to strip everything in some build variants.
+# We do the stripping as a post-install command so that LOCAL_BUILT_MODULE
+# still keeps its symbols and we don't need to clean it (and relink) when
+# switching build variants.
+ifneq ($(filter $(STRIP_EVERYTHING_BUILD_VARIANTS),$(TARGET_BUILD_VARIANT)),)
+$(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := \
+  $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_STRIP) --strip-all $(LOCAL_INSTALLED_MODULE)
+endif
 else
 # Don't strip the binary, just copy it.  We can't skip this step
 # because a copy of the binary must appear at LOCAL_BUILT_MODULE.
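
[Editor note] The post-install stripping hook added above is keyed off STRIP_EVERYTHING_BUILD_VARIANTS. A product that wants fully stripped binaries in, say, user builds could set something like the following (where the variable is defined is an assumption for illustration):

    # BoardConfig.mk or a product makefile (assumed location)
    STRIP_EVERYTHING_BUILD_VARIANTS := user
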
diff --git a/core/envsetup.mk b/core/envsetup.mk
index d0b5846..124a91b 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -119,11 +119,21 @@
 # Define them here so they can be used in product config files.
 TARGET_COPY_OUT_SYSTEM := system
 TARGET_COPY_OUT_DATA := data
-TARGET_COPY_OUT_VENDOR := system/vendor
+TARGET_COPY_OUT_OEM := oem
 TARGET_COPY_OUT_ROOT := root
 TARGET_COPY_OUT_RECOVERY := recovery
+###########################################
+# Define TARGET_COPY_OUT_VENDOR to a placeholder, because at this point
+# we don't know if the device wants to build a separate vendor.img
+# or just build vendor stuff into system.img.
+# A device can set up TARGET_COPY_OUT_VENDOR to "vendor" in its
+# BoardConfig.mk.
+# We'll substitute with the real value after loading BoardConfig.mk.
+_vendor_path_placeholder := ||VENDOR-PATH-PH||
+TARGET_COPY_OUT_VENDOR := $(_vendor_path_placeholder)
+###########################################
 
-# Read the product specs so we an get TARGET_DEVICE and other
+# Read the product specs so we can get TARGET_DEVICE and other
 # variables that we need in order to locate the output files.
 include $(BUILD_SYSTEM)/product_config.mk
 
@@ -156,6 +166,17 @@
 TARGET_DEVICE_DIR := $(patsubst %/,%,$(dir $(board_config_mk)))
 board_config_mk :=
 
+###########################################
+# Now we can substitute with the real value of TARGET_COPY_OUT_VENDOR
+ifeq ($(TARGET_COPY_OUT_VENDOR),$(_vendor_path_placeholder))
+TARGET_COPY_OUT_VENDOR := system/vendor
+else ifeq ($(filter vendor system/vendor,$(TARGET_COPY_OUT_VENDOR)),)
+$(error TARGET_COPY_OUT_VENDOR must be either 'vendor' or 'system/vendor', seeing '$(TARGET_COPY_OUT_VENDOR)'.)
+endif
+PRODUCT_COPY_FILES := $(subst $(_vendor_path_placeholder),$(TARGET_COPY_OUT_VENDOR),$(PRODUCT_COPY_FILES))
+###########################################
+
+
 # ---------------------------------------------------------------
 # Set up configuration for target machine.
 # The following must be set:
@@ -224,6 +245,7 @@
 HOST_OUT_INTERMEDIATE_LIBRARIES := $(HOST_OUT_INTERMEDIATES)/lib
 HOST_OUT_NOTICE_FILES := $(HOST_OUT_INTERMEDIATES)/NOTICE_FILES
 HOST_OUT_COMMON_INTERMEDIATES := $(HOST_COMMON_OUT_ROOT)/obj
+HOST_OUT_FAKE := $(HOST_OUT)/fake_packages
 
 HOST_OUT_GEN := $(HOST_OUT)/gen
 HOST_OUT_COMMON_GEN := $(HOST_COMMON_OUT_ROOT)/gen
@@ -238,7 +260,11 @@
 
 # The default host library path.
 # It always points to the path where we build libraries in the default bitness.
-HOST_LIBRARY_PATH := $(HOST_OUT)/lib
+ifeq ($(HOST_PREFER_32_BIT),true)
+HOST_LIBRARY_PATH := $($(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES)
+else
+HOST_LIBRARY_PATH := $(HOST_OUT_SHARED_LIBRARIES)
+endif
 
 TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj
 TARGET_OUT_HEADERS := $(TARGET_OUT_INTERMEDIATES)/include
@@ -315,9 +341,26 @@
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_SHARED_LIBRARIES := $(TARGET_OUT_VENDOR)/lib
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_APPS := $(TARGET_OUT_VENDOR_APPS)
 
+TARGET_OUT_OEM := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_OEM)
+TARGET_OUT_OEM_EXECUTABLES := $(TARGET_OUT_OEM)/bin
+ifneq ($(filter %64,$(TARGET_ARCH)),)
+TARGET_OUT_OEM_SHARED_LIBRARIES := $(TARGET_OUT_OEM)/lib64
+else
+TARGET_OUT_OEM_SHARED_LIBRARIES := $(TARGET_OUT_OEM)/lib
+endif
+# We don't expect Java libraries in the oem.img.
+# TARGET_OUT_OEM_JAVA_LIBRARIES:= $(TARGET_OUT_OEM)/framework
+TARGET_OUT_OEM_APPS := $(TARGET_OUT_OEM)/app
+TARGET_OUT_OEM_ETC := $(TARGET_OUT_OEM)/etc
+
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_EXECUTABLES := $(TARGET_OUT_OEM_EXECUTABLES)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_SHARED_LIBRARIES := $(TARGET_OUT_OEM)/lib
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_APPS := $(TARGET_OUT_OEM_APPS)
+
 TARGET_OUT_UNSTRIPPED := $(PRODUCT_OUT)/symbols
 TARGET_OUT_EXECUTABLES_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/system/bin
 TARGET_OUT_SHARED_LIBRARIES_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/system/lib
+TARGET_OUT_VENDOR_SHARED_LIBRARIES_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/$(TARGET_COPY_OUT_VENDOR)/lib
 TARGET_ROOT_OUT_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)
 TARGET_ROOT_OUT_SBIN_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/sbin
 TARGET_ROOT_OUT_BIN_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/bin
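
[Editor note] As the new comments describe, a device opts into a real vendor partition by overriding the placeholder in its BoardConfig.mk. A minimal sketch (the filesystem type and size are placeholders; the BOARD_VENDORIMAGE_* variables are the ones added to the stash list in core/product.mk later in this change):

    # BoardConfig.mk
    TARGET_COPY_OUT_VENDOR := vendor
    BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
    BOARD_VENDORIMAGE_PARTITION_SIZE := 268435456
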
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index 2e380b6..9325287 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -26,9 +26,15 @@
 
 include $(BUILD_SYSTEM)/dynamic_binary.mk
 
+# Check for statically linked libc
+ifneq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
+ifneq ($(filter $(my_static_libraries),libc),)
+$(error $(LOCAL_PATH): $(LOCAL_MODULE) is statically linking libc to a dynamic executable, please remove libc from static libs or set LOCAL_FORCE_STATIC_EXECUTABLE := true)
+endif
+endif
+
 # Define PRIVATE_ variables from global vars
 my_target_global_ld_dirs := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_LD_DIRS)
-my_target_fdo_lib := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_LIB)
 my_target_libgcc := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBGCC)
 my_target_libatomic := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBATOMIC)
 my_target_crtbegin_dynamic_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_CRTBEGIN_DYNAMIC_O)
@@ -48,7 +54,6 @@
 endif
 $(linked_module): PRIVATE_TARGET_GLOBAL_LD_DIRS := $(my_target_global_ld_dirs)
 $(linked_module): PRIVATE_TARGET_GLOBAL_LDFLAGS := $(my_target_global_ldflags)
-$(linked_module): PRIVATE_TARGET_FDO_LIB := $(my_target_fdo_lib)
 $(linked_module): PRIVATE_TARGET_LIBGCC := $(my_target_libgcc)
 $(linked_module): PRIVATE_TARGET_LIBATOMIC := $(my_target_libatomic)
 $(linked_module): PRIVATE_TARGET_CRTBEGIN_DYNAMIC_O := $(my_target_crtbegin_dynamic_o)
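
[Editor note] The new check above rejects dynamic executables that list libc in their static libraries. A module that genuinely needs a static libc now has to declare itself fully static, roughly like this (module name and source file are hypothetical):

    include $(CLEAR_VARS)
    # hypothetical module that keeps libc static and therefore must be a
    # static executable to pass the check above
    LOCAL_MODULE := example_static_tool
    LOCAL_SRC_FILES := main.c
    LOCAL_STATIC_LIBRARIES := libc
    LOCAL_FORCE_STATIC_EXECUTABLE := true
    include $(BUILD_EXECUTABLE)
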
diff --git a/core/executable_prefer_symlink.mk b/core/executable_prefer_symlink.mk
index f66a5f2..2326e83 100644
--- a/core/executable_prefer_symlink.mk
+++ b/core/executable_prefer_symlink.mk
@@ -8,17 +8,27 @@
 # configuration. Note that we require the TARGET_IS_64_BIT
 # check because 32 bit targets may not define TARGET_PREFER_32_BIT_APPS
 # et al. since those variables make no sense in that context.
-
 ifneq ($(LOCAL_IS_HOST_MODULE),true)
   my_symlink := $(addprefix $(TARGET_OUT)/bin/, $(LOCAL_MODULE))
   ifeq ($(TARGET_IS_64_BIT),true)
-    ifneq ($(TARGET_PREFER_32_BIT_APPS),true)
-$(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_64)
+    ifeq ($(TARGET_SUPPORTS_64_BIT_APPS)|$(TARGET_SUPPORTS_32_BIT_APPS),true|true)
+      # We support both 32 and 64 bit apps, so we will have to
+      # base our decision on whether the target prefers one or the
+      # other.
+      ifeq ($(TARGET_PREFER_32_BIT_APPS),true)
+        $(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_32)
+      else
+        $(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_64)
+      endif
+    else ifeq ($(TARGET_SUPPORTS_64_BIT_APPS),true)
+      # We support only 64 bit apps.
+      $(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_64)
     else
-$(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_32)
+      # We support only 32 bit apps.
+      $(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_32)
     endif
   else
-$(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_32)
+    $(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_32)
   endif
 else
   my_symlink := $(addprefix $(HOST_OUT)/bin/, $(LOCAL_MODULE))
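
[Editor note] The symlink source is now picked from the app-bitness board variables rather than TARGET_PREFER_32_BIT_APPS alone. An illustrative 64-bit target that supports both app bitnesses and prefers 64-bit binaries would be configured roughly like this (values are assumptions for illustration):

    # BoardConfig.mk
    TARGET_SUPPORTS_32_BIT_APPS := true
    TARGET_SUPPORTS_64_BIT_APPS := true
    # left empty so the logic above picks LOCAL_MODULE_STEM_64
    TARGET_PREFER_32_BIT_APPS :=
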
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index e15bde2..61eb3ff 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -67,6 +67,7 @@
 $(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
 $(full_classes_compiled_jar): PRIVATE_RMTYPEDEFS :=
 $(full_classes_compiled_jar): $(java_sources) $(java_resource_sources) $(full_java_lib_deps) \
         $(jar_manifest_file) $(proto_java_sources_file_stamp) $(LOCAL_ADDITIONAL_DEPENDENCIES)
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index e5ebb11..7e0e437 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -20,21 +20,65 @@
 
 #######################################
 include $(BUILD_SYSTEM)/host_java_library_common.mk
+#######################################
+
+# Enable emma instrumentation only if the module asks for it.
+ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
+ifneq (true,$(EMMA_INSTRUMENT))
+LOCAL_EMMA_INSTRUMENT :=
+endif
+endif
+
+full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
+emma_intermediates_dir := $(intermediates.COMMON)/emma_out
+# emma is hardcoded to use the leaf name of its input for the output file --
+# only the output directory can be changed
+full_classes_emma_jar := $(emma_intermediates_dir)/lib/$(notdir $(full_classes_compiled_jar))
+
+LOCAL_INTERMEDIATE_TARGETS += \
+    $(full_classes_compiled_jar) \
+    $(full_classes_emma_jar)
+
+#######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
 
-$(full_classes_compiled_jar): PRIVATE_JAVAC_DEBUG_FLAGS := -g
+ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
+$(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILE := $(intermediates.COMMON)/coverage.em
+$(full_classes_emma_jar): PRIVATE_EMMA_INTERMEDIATES_DIR := $(emma_intermediates_dir)
+ifdef LOCAL_EMMA_COVERAGE_FILTER
+$(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILTER := $(LOCAL_EMMA_COVERAGE_FILTER)
+else
+# by default, avoid applying emma instrumentation to emma's own classes,
+# otherwise exceptions will be thrown
+$(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILTER := *,-emma,-emmarun,-com.vladium.*
+endif
+# this rule will generate both $(PRIVATE_EMMA_COVERAGE_FILE) and
+# $(full_classes_emma_jar)
+$(full_classes_emma_jar) : $(full_classes_compiled_jar) | $(EMMA_JAR)
+	$(transform-classes.jar-to-emma)
 
-java_alternative_checked_module :=
+$(LOCAL_BUILT_MODULE) : $(full_classes_emma_jar)
+	@echo Copying: $@
+	$(hide) $(ACP) -fp $< $@
+
+else # LOCAL_EMMA_INSTRUMENT
+# Directly build into LOCAL_BUILT_MODULE.
+full_classes_compiled_jar := $(LOCAL_BUILT_MODULE)
+endif # LOCAL_EMMA_INSTRUMENT
+
+$(full_classes_compiled_jar): PRIVATE_JAVAC_DEBUG_FLAGS := -g
 
 # The layers file allows you to enforce a layering between java packages.
 # Run build/tools/java-layers.py for more details.
 layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
 
-$(LOCAL_BUILT_MODULE): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(LOCAL_BUILT_MODULE): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS)
-$(LOCAL_BUILT_MODULE): PRIVATE_JAR_EXCLUDE_FILES :=
-$(LOCAL_BUILT_MODULE): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(LOCAL_BUILT_MODULE): $(java_sources) $(java_resource_sources) $(full_java_lib_deps) \
+$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
+$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
+$(full_classes_compiled_jar): PRIVATE_RMTYPEDEFS :=
+$(full_classes_compiled_jar): $(java_sources) $(java_resource_sources) $(full_java_lib_deps) \
 		$(jar_manifest_file) $(proto_java_sources_file_stamp) $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(transform-host-java-to-package)
diff --git a/core/host_test_internal.mk b/core/host_test_internal.mk
index 426c400..691468d 100644
--- a/core/host_test_internal.mk
+++ b/core/host_test_internal.mk
@@ -2,7 +2,14 @@
 ## Shared definitions for all host test compilations.
 #####################################################
 
-LOCAL_CFLAGS += -DGTEST_OS_LINUX -DGTEST_HAS_STD_STRING -O0 -g
+ifeq ($(HOST_OS),windows)
+LOCAL_CFLAGS += -DGTEST_OS_WINDOWS
+else
+LOCAL_CFLAGS += -DGTEST_OS_LINUX
+LOCAL_LDLIBS += -lpthread
+endif
+
+LOCAL_CFLAGS += -DGTEST_HAS_STD_STRING -O0 -g
 LOCAL_C_INCLUDES +=  external/gtest/include
 
 ifneq ($(filter libc++,$(LOCAL_SHARED_LIBRARIES)),)
@@ -12,4 +19,3 @@
 LOCAL_SHARED_LIBRARIES +=
 endif
 
-LOCAL_LDLIBS += -lpthread
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index eb90c50..944420b 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -18,7 +18,7 @@
           $(LOCAL_JNI_SHARED_LIBRARIES)))
 
 # App-specific lib path.
-my_app_lib_path :=  $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES)/$(basename $(my_installed_module_stem))
+my_app_lib_path := $(dir $(LOCAL_INSTALLED_MODULE))lib/$(TARGET_$(my_2nd_arch_prefix)ARCH)
 my_extracted_jni_libs :=
 
 ifdef my_embed_jni
@@ -54,16 +54,20 @@
 # The jni libraries will be installed to the system.img.
 my_jni_filenames := $(notdir $(my_jni_shared_libraries))
 # Make sure the JNI libraries get installed
-$(LOCAL_INSTALLED_MODULE) : | $(addprefix $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES)/, $(my_jni_filenames))
+my_shared_library_path := $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES)
+$(LOCAL_INSTALLED_MODULE) : | $(addprefix $(my_shared_library_path)/, $(my_jni_filenames))
 
 # Create symlink in the app specific lib path
 ifdef LOCAL_POST_INSTALL_CMD
 # Add a shell command separator
 LOCAL_POST_INSTALL_CMD += ;
 endif
+
+my_symlink_target_dir := $(patsubst $(PRODUCT_OUT)%,%,\
+    $(my_shared_library_path))
 LOCAL_POST_INSTALL_CMD += \
   mkdir -p $(my_app_lib_path) \
-  $(foreach lib, $(my_jni_filenames), ;ln -sf ../$(lib) $(my_app_lib_path)/$(lib))
+  $(foreach lib, $(my_jni_filenames), ;ln -sf $(my_symlink_target_dir)/$(lib) $(my_app_lib_path)/$(lib))
 $(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
 
 # Clear jni_shared_libraries to not embed it into the apk.
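
[Editor note] Net effect of the two changes above: the app-specific lib directory is now derived from the installed module's own path, and each symlink points at the library's full location on the partition instead of a relative ../ path. Roughly (library name is a placeholder):

    # before: ln -sf ../libexample.so <app lib dir>/libexample.so
    # after:  ln -sf /system/lib/libexample.so <app lib dir>/libexample.so
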
diff --git a/core/java.mk b/core/java.mk
index 79be181..debdf53 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -26,6 +26,8 @@
       ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
         # Use android_stubs_current if LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS.
         LOCAL_JAVA_LIBRARIES := android_stubs_current $(LOCAL_JAVA_LIBRARIES)
+      else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
+        LOCAL_JAVA_LIBRARIES := android_system_stubs_current $(LOCAL_JAVA_LIBRARIES)
       else
         LOCAL_JAVA_LIBRARIES := sdk_v$(LOCAL_SDK_VERSION) $(LOCAL_JAVA_LIBRARIES)
       endif
@@ -72,10 +74,10 @@
 # Choose leaf name for the compiled jar file.
 ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
 full_classes_compiled_jar_leaf := classes-no-debug-var.jar
-built_dex_intermediate_leaf := classes-no-local.dex
+built_dex_intermediate_leaf := no-local
 else
 full_classes_compiled_jar_leaf := classes-full-debug.jar
-built_dex_intermediate_leaf := classes-with-local.dex
+built_dex_intermediate_leaf := with-local
 endif
 
 ifeq ($(LOCAL_PROGUARD_ENABLED),disabled)
@@ -96,7 +98,7 @@
 # only the output directory can be changed
 full_classes_emma_jar := $(emma_intermediates_dir)/lib/$(jarjar_leaf)
 full_classes_proguard_jar := $(intermediates.COMMON)/$(proguard_jar_leaf)
-built_dex_intermediate := $(intermediates.COMMON)/$(built_dex_intermediate_leaf)
+built_dex_intermediate := $(intermediates.COMMON)/$(built_dex_intermediate_leaf)/classes.dex
 full_classes_stubs_jar := $(intermediates.COMMON)/stubs.jar
 
 ifeq ($(LOCAL_MODULE_CLASS)$(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),APPS)
@@ -142,7 +144,7 @@
 else
 ifneq (,$(LOCAL_SDK_VERSION))
 # Set target-api for LOCAL_SDK_VERSIONs other than current.
-ifneq (,$(filter-out current, $(LOCAL_SDK_VERSION)))
+ifneq (,$(filter-out current system_current, $(LOCAL_SDK_VERSION)))
 renderscript_target_api := $(LOCAL_SDK_VERSION)
 endif
 endif  # LOCAL_SDK_VERSION is set
@@ -158,7 +160,7 @@
 renderscript_flags += $(LOCAL_RENDERSCRIPT_FLAGS)
 
 # prepend the RenderScript system include path
-ifneq ($(filter-out current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current,$(LOCAL_SDK_VERSION))),)
+ifneq ($(filter-out current system_current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current,$(LOCAL_SDK_VERSION))),)
 # if a numeric LOCAL_SDK_VERSION, or current LOCAL_SDK_VERSION with TARGET_BUILD_APPS
 LOCAL_RENDERSCRIPT_INCLUDES := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/renderscript/clang-include \
@@ -321,6 +323,10 @@
 $(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
 $(full_classes_compiled_jar): PRIVATE_WARNINGS_ENABLE := $(LOCAL_WARNINGS_ENABLE)
 
+ifdef LOCAL_RMTYPEDEFS
+$(full_classes_compiled_jar): | $(RMTYPEDEFS)
+endif
+
 # Compile the java files to a .jar file.
 # This intentionally depends on java_sources, not all_java_sources.
 # Deps for generated source files must be handled separately,
@@ -328,6 +334,8 @@
 $(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES := $(LOCAL_JAR_EXCLUDE_PACKAGES)
+$(full_classes_compiled_jar): PRIVATE_RMTYPEDEFS := $(LOCAL_RMTYPEDEFS)
 $(full_classes_compiled_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
 $(full_classes_compiled_jar): $(java_sources) $(java_resource_sources) $(full_java_lib_deps) \
         $(jar_manifest_file) $(layers_file) $(RenderScript_file_stamp) \
@@ -473,7 +481,9 @@
 	$(transform-classes.jar-to-dex)
 $(built_dex): $(built_dex_intermediate) | $(ACP)
 	@echo Copying: $@
-	$(hide) $(ACP) -fp $< $@
+	$(hide) mkdir -p $(dir $@)
+	$(hide) rm -f $(dir $@)/classes*.dex
+	$(hide) $(ACP) -fp $(dir $<)/classes*.dex $(dir $@)
 ifneq ($(GENERATE_DEX_DEBUG),)
 	$(install-dex-debug)
 endif
@@ -500,7 +510,7 @@
 $(findbugs_html) : $(findbugs_xml)
 	@mkdir -p $(dir $@)
 	@echo ConvertXmlToText: $@
-	$(hide) prebuilt/common/findbugs/bin/convertXmlToText -html:fancy.xsl $(PRIVATE_XML_FILE) \
+	$(hide) $(FINDBUGS_DIR)/convertXmlToText -html:fancy.xsl $(PRIVATE_XML_FILE) \
 	> $@
 
 $(LOCAL_MODULE)-findbugs : $(findbugs_html)
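
[Editor note] LOCAL_SDK_VERSION := system_current is now recognized alongside current throughout core/java.mk, core/droiddoc.mk and the package rules below. An app opting into the system API stubs would look roughly like this (module name and source layout are hypothetical):

    include $(CLEAR_VARS)
    LOCAL_PACKAGE_NAME := ExampleSystemApp
    LOCAL_SRC_FILES := $(call all-java-files-under, src)
    # resolves to android_system_stubs_current per the hunk above
    LOCAL_SDK_VERSION := system_current
    include $(BUILD_PACKAGE)
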
diff --git a/core/main.mk b/core/main.mk
index 77efc19..9d6e233 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -185,12 +185,8 @@
 # java version is really openjdk
 ifeq ($(shell echo '$(java_version_str)' | grep -i openjdk),)
 $(info ************************************************************)
-$(info You are attempting to build with an unsupported JDK.)
-$(info $(space))
-$(info This build requires OpenJDK, but you are using:)
+$(info You asked for an OpenJDK 7 build but your version is)
 $(info $(java_version_str).)
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html)
 $(info ************************************************************)
 $(error stop)
 endif # java version is not OpenJdk
@@ -312,6 +308,11 @@
 is_sdk_build := true
 endif
 
+ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).features=$(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
+ifdef TARGET_2ND_ARCH
+ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.isa.$(TARGET_2ND_ARCH).features=$($(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
+endif
+
 ## user/userdebug ##
 
 user_variant := $(filter user userdebug,$(TARGET_BUILD_VARIANT))
@@ -377,10 +378,12 @@
           $(call collapse-pairs, $(ADDITIONAL_BUILD_PROPERTIES))) \
           ro.setupwizard.mode=OPTIONAL
 endif
-# Don't even verify the image on eng builds to speed startup
-ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.image-dex2oat-flags=--compiler-filter=verify-none
-# Don't compile apps on eng builds to speed startup
-ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.dex2oat-flags=--compiler-filter=interpret-only
+ifndef is_sdk_build
+  # Don't even verify the image on eng builds to speed startup
+  ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.image-dex2oat-filter=verify-none
+  # Don't compile apps on eng builds to speed startup
+  ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.dex2oat-filter=interpret-only
+endif
 endif
 
 ## sdk ##
@@ -391,7 +394,7 @@
 sdk_repo_goal := $(strip $(filter sdk_repo,$(MAKECMDGOALS)))
 MAKECMDGOALS := $(strip $(filter-out sdk_repo,$(MAKECMDGOALS)))
 
-ifneq ($(words $(filter-out $(INTERNAL_MODIFIER_TARGETS) checkbuild target-files-package,$(MAKECMDGOALS))),1)
+ifneq ($(words $(filter-out $(INTERNAL_MODIFIER_TARGETS) checkbuild emulator_tests target-files-package,$(MAKECMDGOALS))),1)
 $(error The 'sdk' target may not be specified with any other targets)
 endif
 
@@ -933,6 +936,9 @@
   $(PROGUARD_DICT_ZIP) : $(apps_only_installed_files)
   $(call dist-for-goals,apps_only, $(PROGUARD_DICT_ZIP))
 
+  $(SYMBOLS_ZIP) : $(apps_only_installed_files)
+  $(call dist-for-goals,apps_only, $(SYMBOLS_ZIP))
+
 .PHONY: apps_only
 apps_only: $(unbundled_build_modules)
 
@@ -965,6 +971,7 @@
   $(foreach f,$(INSTALLED_RADIOIMAGE_TARGET), \
     $(call dist-for-goals, droidcore, $(f)))
 
+  ifneq ($(ANDROID_BUILD_EMBEDDED),true)
   ifneq ($(TARGET_BUILD_PDK),true)
     $(call dist-for-goals, droidcore, \
       $(APPS_ZIP) \
@@ -972,6 +979,7 @@
       $(PACKAGE_STATS_FILE) \
     )
   endif
+  endif
 
   ifeq ($(EMMA_INSTRUMENT),true)
     $(EMMA_META_ZIP) : $(INSTALLED_SYSTEMIMAGE)
@@ -1011,6 +1019,12 @@
 target-native-tests : native-target-tests
 tests : host-tests target-tests
 
+# To catch more build breakage, also build the tests modules in eng and userdebug builds.
+ifneq ($(TARGET_BUILD_PDK),true)
+ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
+droidcore : target-tests host-tests
+endif
+endif
 
 .PHONY: lintall
 
@@ -1035,7 +1049,7 @@
 
 .PHONY: clean
 clean:
-	@rm -rf $(OUT_DIR)
+	@rm -rf $(OUT_DIR)/*
 	@echo "Entire build directory removed."
 
 .PHONY: clobber
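
[Editor note] The main.mk hunk above also adds per-ISA dex2oat feature properties and, on non-SDK eng builds, the new dex2oat filter properties. The resulting build.prop lines would look roughly like this on an arm64 device (the feature strings are placeholders for whatever the build derives):

    dalvik.vm.isa.arm64.features=default
    dalvik.vm.isa.arm.features=default
    dalvik.vm.image-dex2oat-filter=verify-none
    dalvik.vm.dex2oat-filter=interpret-only
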
diff --git a/core/package_internal.mk b/core/package_internal.mk
index d52703a..bb458d4 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -68,8 +68,15 @@
 LOCAL_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) -z
 endif
 
+ifdef LOCAL_PACKAGE_SPLITS
+LOCAL_AAPT_FLAGS += $(addprefix --split ,$(LOCAL_PACKAGE_SPLITS))
+endif
+
+need_compile_asset :=
 ifeq (,$(LOCAL_ASSET_DIR))
 LOCAL_ASSET_DIR := $(LOCAL_PATH)/assets
+else
+need_compile_asset := true
 endif
 
 # LOCAL_RESOURCE_DIR may point to resource generated during the build
@@ -97,6 +104,10 @@
        ) \
      ))
 
+ifneq ($(all_assets),)
+need_compile_asset := true
+endif
+
 all_resources := $(strip \
     $(foreach dir, $(LOCAL_RESOURCE_DIR), \
       $(addprefix $(dir)/, \
@@ -115,7 +126,7 @@
 package_expected_intermediates_COMMON := $(call local-intermediates-dir,COMMON)
 # If no assets or resources were found, clear the directory variables so
 # we don't try to build them.
-ifeq (,$(all_assets))
+ifneq (true,$(need_compile_asset))
 LOCAL_ASSET_DIR:=
 endif
 ifneq (true,$(need_compile_res))
@@ -130,6 +141,7 @@
 endif
 
 LOCAL_BUILT_MODULE_STEM := package.apk
+LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk
 
 LOCAL_PROGUARD_ENABLED:=$(strip $(LOCAL_PROGUARD_ENABLED))
 ifndef LOCAL_PROGUARD_ENABLED
@@ -193,7 +205,7 @@
 
 $(LOCAL_INTERMEDIATE_TARGETS): \
     PRIVATE_ANDROID_MANIFEST := $(full_android_manifest)
-ifneq (,$(filter-out current, $(LOCAL_SDK_VERSION)))
+ifneq (,$(filter-out current system_current, $(LOCAL_SDK_VERSION)))
 $(LOCAL_INTERMEDIATE_TARGETS): \
     PRIVATE_DEFAULT_APP_TARGET_SDK := $(LOCAL_SDK_VERSION)
 else
@@ -241,6 +253,7 @@
 
 $(proguard_options_file): $(R_file_stamp)
 
+resource_export_package :=
 ifdef LOCAL_EXPORT_PACKAGE_RESOURCES
 # Put this module's resources into a PRODUCT-agnostic package that
 # other packages can use to build their own PRODUCT-agnostic R.java (etc.)
@@ -280,7 +293,7 @@
 # Most packages should link against the resources defined by framework-res.
 # Even if they don't have their own resources, they may use framework
 # resources.
-ifneq ($(filter-out current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current,$(LOCAL_SDK_RES_VERSION))),)
+ifneq ($(filter-out current system_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current,$(LOCAL_SDK_RES_VERSION))),)
 # for released sdk versions, the platform resources were built into android.jar.
 framework_res_package_export := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
@@ -294,9 +307,19 @@
 framework_res_package_export_deps := \
     $(dir $(framework_res_package_export))src/R.stamp
 endif # LOCAL_SDK_RES_VERSION
-$(R_file_stamp): $(framework_res_package_export_deps)
+all_library_res_package_exports := \
+    $(framework_res_package_export) \
+    $(foreach lib,$(LOCAL_RES_LIBRARIES),\
+        $(call intermediates-dir-for,APPS,$(lib),,COMMON)/package-export.apk)
+
+all_library_res_package_export_deps := \
+    $(framework_res_package_export_deps) \
+    $(foreach lib,$(LOCAL_RES_LIBRARIES),\
+        $(call intermediates-dir-for,APPS,$(lib),,COMMON)/src/R.stamp)
+
+$(resource_export_package) $(R_file_stamp) $(LOCAL_BUILT_MODULE): $(all_library_res_package_export_deps)
 $(LOCAL_INTERMEDIATE_TARGETS): \
-    PRIVATE_AAPT_INCLUDES := $(framework_res_package_export)
+    PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports)
 endif # LOCAL_NO_STANDARD_LIBRARIES
 
 ifneq ($(full_classes_jar),)
@@ -354,8 +377,12 @@
     $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
 else
     $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_CONFIG := $(PRODUCT_AAPT_CONFIG)
+ifdef LOCAL_PACKAGE_SPLITS
+    $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
+else
     $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_PREF_CONFIG := $(PRODUCT_AAPT_PREF_CONFIG)
 endif
+endif
 $(LOCAL_BUILT_MODULE): $(all_res_assets) $(jni_shared_libraries) $(full_android_manifest)
 	@echo "target Package: $(PRIVATE_MODULE) ($@)"
 	$(create-empty-package)
@@ -383,7 +410,8 @@
 ## Rule to build the odex file
 ifdef LOCAL_DEX_PREOPT
 $(built_odex): PRIVATE_DEX_FILE := $(built_dex)
-$(built_odex) : $(built_dex)
+# Use pattern rule - we may have multiple built odex files.
+$(built_odex) : $(dir $(LOCAL_BUILT_MODULE))% : $(built_dex)
 	$(hide) mkdir -p $(dir $@) && rm -f $@
 	$(add-dex-to-package)
 	$(hide) mv $@ $@.input
@@ -391,6 +419,44 @@
 	$(hide) rm $@.input
 endif
 
+###############################
+## APK splits
+ifdef LOCAL_PACKAGE_SPLITS
+# LOCAL_PACKAGE_SPLITS is a list of resource labels.
+# aapt will convert commas inside a resource label to underscores in the file names.
+my_split_suffixes := $(subst $(comma),_,$(LOCAL_PACKAGE_SPLITS))
+built_apk_splits := $(foreach s,$(my_split_suffixes),$(built_module_path)/package_$(s).apk)
+installed_apk_splits := $(foreach s,$(my_split_suffixes),$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
+
+# The splits should have been built in the same command building the base apk.
+# This rule just runs signing and zipalign etc.
+# Note that we explicitly check for the existence of the split apk and remove
+# the built base apk if the split apk isn't there.
+# That way the build system will rerun aapt after the user changes the splitting parameters.
+$(built_apk_splits): PRIVATE_PRIVATE_KEY := $(private_key)
+$(built_apk_splits): PRIVATE_CERTIFICATE := $(certificate)
+$(built_apk_splits) : $(built_module_path)/%.apk : $(LOCAL_BUILT_MODULE)
+	$(hide) if [ ! -f $@ ]; then \
+	  echo 'No $@ generated, check your apk splitting parameters.' 1>&2; \
+	  rm $<; exit 1; \
+	fi
+	$(sign-package)
+	$(align-package)
+
+# Rules to install the splits
+$(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(built_module_path)/package_%.apk | $(ACP)
+	@echo "Install: $@"
+	$(copy-file-to-new-target)
+
+# Register the additional built and installed files.
+ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
+ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
+  $(foreach s,$(my_split_suffixes),$(built_module_path)/package_$(s).apk:$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
+
+# Make sure to install the splits when you run "make <module_name>".
+$(my_register_name): $(installed_apk_splits)
+endif # LOCAL_PACKAGE_SPLITS
+
 # Save information about this package
 PACKAGES.$(LOCAL_PACKAGE_NAME).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
 PACKAGES.$(LOCAL_PACKAGE_NAME).RESOURCE_FILES := $(all_resources)
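
[Editor note] For the APK-split support added above, a built-from-source app would request splits roughly like this; the module name and the split labels are hypothetical. Each label becomes an aapt --split argument, producing package_<label>.apk in the intermediates and installing it as <module>_<label>.apk next to the base apk.

    include $(CLEAR_VARS)
    LOCAL_PACKAGE_NAME := ExampleSplitApp
    LOCAL_SRC_FILES := $(call all-java-files-under, src)
    LOCAL_PACKAGE_SPLITS := xhdpi xxhdpi
    include $(BUILD_PACKAGE)
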
diff --git a/core/pathmap.mk b/core/pathmap.mk
index f12d19c..0820885 100644
--- a/core/pathmap.mk
+++ b/core/pathmap.mk
@@ -29,10 +29,8 @@
 pathmap_INCL := \
     bootloader:bootable/bootloader/legacy/include \
     camera:system/media/camera/include \
-    corecg:external/skia/include/core \
     frameworks-base:frameworks/base/include \
     frameworks-native:frameworks/native/include \
-    graphics:external/skia/include/core \
     libc:bionic/libc/include \
     libhardware:hardware/libhardware/include \
     libhardware_legacy:hardware/libhardware_legacy/include \
@@ -51,6 +49,7 @@
     audio-route:system/media/audio_route/include \
     wilhelm:frameworks/wilhelm/include \
     wilhelm-ut:frameworks/wilhelm/src/ut \
+    mediandk:frameworks/av/media/ndk/ \
     speex:external/speex/include
 
 #
@@ -86,7 +85,9 @@
 	    drm \
 	    opengl \
 	    sax \
+	    telecomm \
 	    telephony \
+	    phone \
 	    wifi \
 	    keystore \
 	    rs \
@@ -109,20 +110,34 @@
         v4 \
         v7/gridlayout \
         v7/appcompat \
+        v7/cardview \
         v7/mediarouter \
+        v7/palette \
+        v7/recyclerview \
         v8/renderscript \
-        v13
+        v13 \
+        v17/leanback
+
+#
+# A list of all source roots under frameworks/multidex.
+#
+FRAMEWORKS_MULTIDEX_SUBDIRS := \
+        multidex/library/src \
+        multidex/instrumentation/src
 
 #
 # A version of FRAMEWORKS_SUPPORT_SUBDIRS that is expanded to full paths from
 # the root of the tree.
 #
 FRAMEWORKS_SUPPORT_JAVA_SRC_DIRS := \
-	$(addprefix frameworks/support/,$(FRAMEWORKS_SUPPORT_SUBDIRS))
+	$(addprefix frameworks/support/,$(FRAMEWORKS_SUPPORT_SUBDIRS)) \
+	$(addprefix frameworks/,$(FRAMEWORKS_MULTIDEX_SUBDIRS))
 
 #
 # A list of support library modules.
 #
 FRAMEWORKS_SUPPORT_JAVA_LIBRARIES := \
-    $(foreach dir,$(FRAMEWORKS_SUPPORT_SUBDIRS),android-support-$(subst /,-,$(dir)))
+    $(foreach dir,$(FRAMEWORKS_SUPPORT_SUBDIRS),android-support-$(subst /,-,$(dir))) \
+    android-support-multidex \
+    android-support-multidex-instrumentation
 
diff --git a/core/pdk_config.mk b/core/pdk_config.mk
index b0cccc9..838754f 100644
--- a/core/pdk_config.mk
+++ b/core/pdk_config.mk
@@ -18,9 +18,14 @@
 
 # if PDK_FUSION_PLATFORM_ZIP is specified, do not override.
 ifndef PDK_FUSION_PLATFORM_ZIP
+# Most PDK project paths should be using vendor/pdk/TARGET_DEVICE
+# but some legacy ones (e.g. mini_armv7a_neon generic PDK) were set up
+# with vendor/pdk/TARGET_PRODUCT.
 _pdk_fusion_default_platform_zip = $(wildcard \
 vendor/pdk/$(TARGET_DEVICE)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform/platform.zip \
-vendor/pdk/$(TARGET_DEVICE)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip)
+vendor/pdk/$(TARGET_DEVICE)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip \
+vendor/pdk/$(TARGET_PRODUCT)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform/platform.zip \
+vendor/pdk/$(TARGET_PRODUCT)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip)
 ifneq (,$(_pdk_fusion_default_platform_zip))
 PDK_FUSION_PLATFORM_ZIP := $(word 1, $(_pdk_fusion_default_platform_zip))
 TARGET_BUILD_PDK := true
@@ -48,14 +53,14 @@
 # all paths under out dir
 PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR += \
 	target/common/obj/JAVA_LIBRARIES/android_stubs_current_intermediates \
-	target/common/obj/JAVA_LIBRARIES/core_intermediates \
+	target/common/obj/JAVA_LIBRARIES/core-libart_intermediates \
 	target/common/obj/JAVA_LIBRARIES/core-junit_intermediates \
 	target/common/obj/JAVA_LIBRARIES/ext_intermediates \
 	target/common/obj/JAVA_LIBRARIES/framework_intermediates \
-	target/common/obj/JAVA_LIBRARIES/framework2_intermediates \
 	target/common/obj/JAVA_LIBRARIES/android.test.runner_intermediates \
 	target/common/obj/JAVA_LIBRARIES/telephony-common_intermediates \
 	target/common/obj/JAVA_LIBRARIES/voip-common_intermediates \
+	target/common/obj/JAVA_LIBRARIES/ims-common_intermediates \
 	target/common/obj/JAVA_LIBRARIES/mms-common_intermediates \
 	target/common/obj/JAVA_LIBRARIES/android-ex-camera2_intermediates \
 	target/common/obj/JAVA_LIBRARIES/android-common_intermediates \
@@ -187,12 +192,16 @@
 ifneq (,$(filter platform platform-java, $(MAKECMDGOALS))$(filter true,$(TARGET_BUILD_PDK)))
 # files under $(PRODUCT_OUT)/symbols to help debugging.
 # Source not included to PDK due to dependency issue, so provide symbols instead.
+
+# We may not be building all of them.
+# The platform.zip just silently ignores the nonexistent ones.
 PDK_SYMBOL_FILES_LIST := \
-	system/bin/app_process
+    system/bin/app_process32 \
+    system/bin/app_process64
 
 ifdef PDK_FUSION_PLATFORM_ZIP
 # symbols should be explicitly pulled for fusion build
-$(foreach f,$(PDK_SYMBOL_FILES_LIST),\
+$(foreach f,$(filter $(PDK_SYMBOL_FILES_LIST), $(_pdk_fusion_file_list)),\
   $(eval $(call add-dependency,$(PRODUCT_OUT)/$(f),$(PRODUCT_OUT)/symbols/$(f))))
 endif # PLATFORM_ZIP
 endif # platform.zip build or PDK
diff --git a/core/post_clean.mk b/core/post_clean.mk
index 213c43c..0273ff2 100644
--- a/core/post_clean.mk
+++ b/core/post_clean.mk
@@ -15,6 +15,7 @@
 # Clean steps that need global knowledge of individual modules.
 # This file must be included after all Android.mks have been loaded.
 
+#######################################################
 # Checks the current build configurations against the previous build,
 # clean artifacts in TARGET_COMMON_OUT_ROOT if necessary.
 # If a package's resource overlay has been changed, its R class needs to be
@@ -51,3 +52,45 @@
 previous_package_overlay_config :=
 current_package_overlay_config :=
 current_all_packages_config :=
+
+#######################################################
+# Check if we need to delete obsolete aidl-generated java files.
+# When an aidl file gets deleted (or renamed), the generated java file is obsolete.
+previous_aidl_config := $(TARGET_OUT_COMMON_INTERMEDIATES)/previous_aidl_config.mk
+current_aidl_config := $(TARGET_OUT_COMMON_INTERMEDIATES)/current_aidl_config.mk
+
+$(shell rm -rf $(current_aidl_config) \
+  && mkdir -p $(dir $(current_aidl_config))\
+  && touch $(current_aidl_config))
+-include $(previous_aidl_config)
+
+intermediates_to_clean :=
+modules_with_aidl_files :=
+$(foreach p, $(ALL_MODULES), \
+  $(if $(ALL_MODULES.$(p).AIDL_FILES),\
+    $(eval modules_with_aidl_files += $(p))\
+    $(shell echo 'AIDL_FILES.$(p) := $(ALL_MODULES.$(p).AIDL_FILES)' >> $(current_aidl_config)))\
+  $(if $(filter-out $(ALL_MODULES.$(p).AIDL_FILES),$(AIDL_FILES.$(p))),\
+    $(eval intermediates_to_clean += $(ALL_MODULES.$(p).INTERMEDIATE_SOURCE_DIR))))
+intermediates_to_clean := $(strip $(intermediates_to_clean))
+ifdef intermediates_to_clean
+$(info *** Obsolete aidl-generated files detected, clean intermediate files...)
+$(info *** rm -rf $(intermediates_to_clean))
+$(shell rm -rf $(intermediates_to_clean))
+intermediates_to_clean :=
+endif
+
+# For modules not loaded by the current build (e.g. you are running mm/mmm),
+# we copy the info from the previous build.
+$(foreach p, $(filter-out $(ALL_MODULES),$(MODULES_WITH_AIDL_FILES)),\
+  $(shell echo 'AIDL_FILES.$(p) := $(AIDL_FILES.$(p))' >> $(current_aidl_config)))
+MODULES_WITH_AIDL_FILES := $(sort $(MODULES_WITH_AIDL_FILES) $(modules_with_aidl_files))
+$(shell echo 'MODULES_WITH_AIDL_FILES := $(MODULES_WITH_AIDL_FILES)' >> $(current_aidl_config))
+
+# Now current becomes previous.
+$(shell mv -f $(current_aidl_config) $(previous_aidl_config))
+
+MODULES_WITH_AIDL_FILES :=
+modules_with_aidl_files :=
+previous_aidl_config :=
+current_aidl_config :=
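
[Editor note] For reference, the generated current_aidl_config.mk written by the shell snippets above simply records, per module, the aidl files seen in this build, e.g. (module and path are placeholders):

    AIDL_FILES.ExampleLib := example/src/com/example/IExample.aidl
    MODULES_WITH_AIDL_FILES := ExampleLib
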
diff --git a/core/prebuilt.mk b/core/prebuilt.mk
index 33f5dc6..ba0e757 100644
--- a/core/prebuilt.mk
+++ b/core/prebuilt.mk
@@ -17,6 +17,7 @@
 my_skip_non_preferred_arch :=
 
 # check if first arch is supported
+LOCAL_2ND_ARCH_VAR_PREFIX :=
 include $(BUILD_SYSTEM)/module_arch_supported.mk
 ifeq ($(my_module_arch_supported),true)
 # first arch is supported
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index b5e5189..284884c 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -54,6 +54,11 @@
 endif
 endif
 
+ifeq ($(LOCAL_MODULE_CLASS),APPS)
+LOCAL_BUILT_MODULE_STEM := package.apk
+LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk
+endif
+
 ifeq ($(LOCAL_STRIP_MODULE),true)
   ifdef LOCAL_IS_HOST_MODULE
     $(error Cannot strip host module LOCAL_PATH=$(LOCAL_PATH))
@@ -106,6 +111,7 @@
 
 endif  # LOCAL_STRIP_MODULE not true
 
+ifeq ($(LOCAL_MODULE_CLASS),APPS)
 PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
 
 rs_compatibility_jni_libs :=
@@ -125,11 +131,9 @@
   $(built_module) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
 endif
 ifeq ($(LOCAL_CERTIFICATE),)
-  ifneq ($(filter APPS,$(LOCAL_MODULE_CLASS)),)
-    # It is now a build error to add a prebuilt .apk without
-    # specifying a key for it.
-    $(error No LOCAL_CERTIFICATE specified for prebuilt "$(my_prebuilt_src_file)")
-  endif
+  # It is now a build error to add a prebuilt .apk without
+  # specifying a key for it.
+  $(error No LOCAL_CERTIFICATE specified for prebuilt "$(my_prebuilt_src_file)")
 else ifeq ($(LOCAL_CERTIFICATE),PRESIGNED)
   # The magic string "PRESIGNED" means this package is already checked
   # signed with its release key.
@@ -153,10 +157,10 @@
   $(built_module) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
 endif
 
-ifneq ($(filter APPS,$(LOCAL_MODULE_CLASS)),)
-
-# Disable dex-preopt of prebuilts to save space
+# Disable dex-preopt of prebuilts to save space, if requested.
+ifeq ($(DONT_DEXPREOPT_PREBUILTS),true)
 LOCAL_DEX_PREOPT := false
+endif
 
 #######################################
 # defines built_odex along with rule to install odex
@@ -185,6 +189,42 @@
 	$(call dexpreopt-one-file,$<,$@)
 endif
 
+###############################
+## Install split apks.
+ifdef LOCAL_PACKAGE_SPLITS
+# LOCAL_PACKAGE_SPLITS is a list of apks to be installed.
+built_apk_splits := $(addprefix $(built_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
+installed_apk_splits := $(addprefix $(my_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
+
+# Rules to sign and zipalign the split apks.
+my_src_dir := $(sort $(dir $(LOCAL_PACKAGE_SPLITS)))
+ifneq (1,$(words $(my_src_dir)))
+$(error You must put all the split source apks in the same folder: $(LOCAL_PACKAGE_SPLITS))
+endif
+my_src_dir := $(LOCAL_PATH)/$(my_src_dir)
+
+$(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
+$(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
+$(built_apk_splits) : $(built_module_path)/%.apk : $(my_src_dir)/%.apk | $(ACP)
+	$(copy-file-to-new-target)
+	$(sign-package)
+	$(align-package)
+
+# Rules to install the split apks.
+$(installed_apk_splits) : $(my_module_path)/%.apk : $(built_module_path)/%.apk | $(ACP)
+	@echo "Install: $@"
+	$(copy-file-to-new-target)
+
+# Register the additional built and installed files.
+ALL_MODULES.$(my_register_name).INSTALLED += $(installed_apk_splits)
+ALL_MODULES.$(my_register_name).BUILT_INSTALLED += \
+  $(foreach s,$(LOCAL_PACKAGE_SPLITS),$(built_module_path)/$(notdir $(s)):$(my_module_path)/$(notdir $(s)))
+
+# Make sure to install the splits when you run "make <module_name>".
+$(my_register_name): $(installed_apk_splits)
+
+endif # LOCAL_PACKAGE_SPLITS
+
 else # LOCAL_MODULE_CLASS != APPS
 ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
 $(built_module) : $(my_prebuilt_src_file)
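
[Editor note] A sketch of how a prebuilt app might use the new split installation path above; the module name, certificate and file names are hypothetical. Note the check above requires all split source apks to sit in the same folder as the base apk.

    include $(CLEAR_VARS)
    LOCAL_MODULE := ExamplePrebuiltApp
    LOCAL_MODULE_CLASS := APPS
    LOCAL_CERTIFICATE := platform
    LOCAL_SRC_FILES := apks/ExamplePrebuiltApp.apk
    LOCAL_PACKAGE_SPLITS := apks/ExamplePrebuiltApp_xhdpi.apk apks/ExamplePrebuiltApp_xxhdpi.apk
    include $(BUILD_PREBUILT)
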
diff --git a/core/product.mk b/core/product.mk
index 174b429..0075acd 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -91,6 +91,7 @@
     PRODUCT_SDK_ADDON_COPY_FILES \
     PRODUCT_SDK_ADDON_COPY_MODULES \
     PRODUCT_SDK_ADDON_DOC_MODULES \
+    PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP \
     PRODUCT_DEFAULT_WIFI_CHANNELS \
     PRODUCT_DEFAULT_DEV_CERTIFICATE \
     PRODUCT_RESTRICT_VENDOR_FILES \
@@ -100,6 +101,13 @@
     PRODUCT_RUNTIMES \
     PRODUCT_BOOT_JARS \
     PRODUCT_DEX_PREOPT_IMAGE_IN_DATA \
+    PRODUCT_SUPPORTS_VERITY \
+    PRODUCT_OEM_PROPERTIES \
+    PRODUCT_SYSTEM_PROPERTY_BLACKLIST \
+    PRODUCT_SYSTEM_SERVER_JARS \
+    PRODUCT_VERITY_SIGNING_KEY \
+    PRODUCT_SYSTEM_VERITY_PARTITION \
+    PRODUCT_VENDOR_VERITY_PARTITION
 
 define dump-product
 $(info ==== $(1) ====)\
@@ -211,6 +219,7 @@
 
 _product_stash_var_list := $(_product_var_list) \
 	PRODUCT_BOOTCLASSPATH \
+	PRODUCT_SYSTEM_SERVER_CLASSPATH \
 	TARGET_ARCH \
 	TARGET_ARCH_VARIANT \
 	TARGET_CPU_VARIANT \
@@ -244,17 +253,20 @@
 	BOARD_BOOTIMAGE_PARTITION_SIZE \
 	BOARD_RECOVERYIMAGE_PARTITION_SIZE \
 	BOARD_SYSTEMIMAGE_PARTITION_SIZE \
+	BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE \
 	BOARD_USERDATAIMAGE_PARTITION_SIZE \
 	BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE \
 	BOARD_CACHEIMAGE_PARTITION_SIZE \
 	BOARD_FLASH_BLOCK_SIZE \
-	BOARD_SYSTEMIMAGE_PARTITION_SIZE \
+	BOARD_VENDORIMAGE_PARTITION_SIZE \
+	BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE \
 	BOARD_INSTALLER_CMDLINE \
 
 
 _product_stash_var_list += \
 	DEFAULT_SYSTEM_DEV_CERTIFICATE \
-	WITH_DEXPREOPT
+	WITH_DEXPREOPT \
+	WITH_DEXPREOPT_BOOT_IMG_ONLY
 
 #
 # Stash values of the variables in _product_stash_var_list.
diff --git a/core/product_config.mk b/core/product_config.mk
index 9468362..32e351c 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -281,6 +281,7 @@
 
 # A list of module names of BOOTCLASSPATH (jar files)
 PRODUCT_BOOT_JARS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_BOOT_JARS))
+PRODUCT_SYSTEM_SERVER_JARS := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_SERVER_JARS))
 
 # Find the device that this product maps to.
 TARGET_DEVICE := $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEVICE)
@@ -314,11 +315,13 @@
     $(if $(filter %dpi,$(PRODUCT_AAPT_CONFIG)),,mdpi))
 PRODUCT_AAPT_PREF_CONFIG := $(strip $(PRODUCT_AAPT_PREF_CONFIG))
 
-# Everyone gets nodpi assets which are density-independent.
-PRODUCT_AAPT_CONFIG += nodpi
+# Everyone gets nodpi and anydpi assets which are density-independent.
+PRODUCT_AAPT_CONFIG += nodpi anydpi
+
+# Keep a copy of the space-separated config
+PRODUCT_AAPT_CONFIG_SP := $(PRODUCT_AAPT_CONFIG)
 
 # Convert spaces to commas.
-comma := ,
 PRODUCT_AAPT_CONFIG := \
     $(subst $(space),$(comma),$(strip $(PRODUCT_AAPT_CONFIG)))
 PRODUCT_AAPT_PREF_CONFIG := \
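
[Editor note] Worked example of the aapt-config handling above (the product's own values are illustrative): if a product declares PRODUCT_AAPT_CONFIG := normal hdpi, then after this block

    PRODUCT_AAPT_CONFIG_SP = normal hdpi nodpi anydpi
    PRODUCT_AAPT_CONFIG    = normal,hdpi,nodpi,anydpi
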
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index 622d4ee..4a85db0 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -59,3 +59,6 @@
 # platform version.  We know about them, and they are safe.
 # See proguard-android.txt in the SDK package.
 -dontwarn android.support.**
+
+# Less spammy.
+-dontnote
diff --git a/core/sdk_font.mk b/core/sdk_font.mk
new file mode 100644
index 0000000..204403d
--- /dev/null
+++ b/core/sdk_font.mk
@@ -0,0 +1,66 @@
+###############################################################################
+# Fonts shipped with the SDK need to be renamed for Java to handle them
+# properly. Hence, a special script is used to rename the fonts. We bundle all
+# the fonts that are shipped on a newer non-space-constrained device. However,
+# OpenType fonts used on these devices are not supported by Java. Their
+# replacements are added separately.
+###############################################################################
+
+
+# The script that renames the font.
+sdk_font_rename_script := frameworks/base/tools/layoutlib/rename_font/build_font_single.py
+
+# Location of the fonttools library that the above script depends on.
+fonttools_lib := external/fonttools/Lib
+
+# A temporary location to store the renamed fonts. atree picks all files in
+# this directory and bundles it with the SDK.
+SDK_FONT_TEMP := $(call intermediates-dir-for,PACKAGING,sdk-fonts,HOST,COMMON)
+
+# The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
+sdk_font_config := $(wildcard frameworks/base/data/fonts/*.xml)
+sdk_font_config :=  $(addprefix $(SDK_FONT_TEMP)/, $(notdir $(sdk_font_config)))
+
+$(sdk_font_config): $(SDK_FONT_TEMP)/%.xml: \
+			frameworks/base/data/fonts/%.xml
+	$(hide) mkdir -p $(dir $@)
+	$(hide) cp -vf $< $@
+
+# List of fonts on the device that we want to ship. This is all .ttf fonts.
+sdk_fonts_device := $(filter $(TARGET_OUT)/fonts/%.ttf, $(INTERNAL_SYSTEMIMAGE_FILES))
+sdk_fonts_device := $(addprefix $(SDK_FONT_TEMP)/, $(notdir $(sdk_fonts_device)))
+
+# Macro to rename the font.
+sdk_rename_font = PYTHONPATH=$$PYTHONPATH:$(fonttools_lib) $(sdk_font_rename_script) \
+	    $1 $2
+
+# TODO: If the font file is a symlink, reuse the font renamed from the symlink
+# target.
+$(sdk_fonts_device): $(SDK_FONT_TEMP)/%.ttf: $(TARGET_OUT)/fonts/%.ttf \
+			$(sdk_font_rename_script)
+	$(hide) mkdir -p $(dir $@)
+	$(hide) $(call sdk_rename_font,$<,$@)
+
+# List of all dependencies - all fonts and configuration files.
+SDK_FONT_DEPS := $(sdk_fonts_device) $(sdk_font_config)
+
+# Define a macro to create a rule for additional fonts that we want to include
+# in the SDK.
+# $1 Output font name
+# $2 Source font path
+define sdk-extra-font-rule
+fontfullname := $$(SDK_FONT_TEMP)/$1
+ifeq ($$(filter $$(fontfullname),$$(sdk_fonts_device)),)
+SDK_FONT_DEPS += $$(fontfullname)
+$$(fontfullname): $2 $$(sdk_font_rename_script)
+	$$(hide) mkdir -p $$(dir $$@)
+	$$(hide) $$(call sdk_rename_font,$$<,$$@)
+endif
+fontfullname :=
+endef
+
+# These extra fonts are used as a replacement for OpenType fonts.
+$(eval $(call sdk-extra-font-rule,NanumGothic.ttf,external/naver-fonts/NanumGothic.ttf))
+$(eval $(call sdk-extra-font-rule,DroidSansFallback.ttf,frameworks/base/data/fonts/DroidSansFallbackFull.ttf))
+
+sdk-extra-font-rule :=
diff --git a/core/setup_one_odex.mk b/core/setup_one_odex.mk
new file mode 100644
index 0000000..ec8a28a
--- /dev/null
+++ b/core/setup_one_odex.mk
@@ -0,0 +1,39 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Set up variables and dependency for one odex file
+# Input variables: my_2nd_arch_prefix
+# Output(modified) variables: built_odex, installed_odex, built_installed_odex
+
+my_built_odex := $(call get-odex-file-path,$($(my_2nd_arch_prefix)DEX2OAT_TARGET_ARCH),$(LOCAL_BUILT_MODULE))
+ifdef LOCAL_DEX_PREOPT_IMAGE_LOCATION
+my_dex_preopt_image_location := $(LOCAL_DEX_PREOPT_IMAGE_LOCATION)
+else
+my_dex_preopt_image_location := $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION)
+endif
+my_dex_preopt_image_filename := $(call get-image-file-path,$($(my_2nd_arch_prefix)DEX2OAT_TARGET_ARCH),$(my_dex_preopt_image_location))
+$(my_built_odex): PRIVATE_2ND_ARCH_VAR_PREFIX := $(my_2nd_arch_prefix)
+$(my_built_odex): PRIVATE_DEX_LOCATION := $(patsubst $(PRODUCT_OUT)%,%,$(LOCAL_INSTALLED_MODULE))
+$(my_built_odex): PRIVATE_DEX_PREOPT_IMAGE_LOCATION := $(my_dex_preopt_image_location)
+$(my_built_odex) : $($(my_2nd_arch_prefix)DEXPREOPT_ONE_FILE_DEPENDENCY_BUILT_BOOT_PREOPT) \
+    $(DEXPREOPT_ONE_FILE_DEPENDENCY_TOOLS) \
+    $(my_dex_preopt_image_filename)
+
+my_installed_odex := $(call get-odex-file-path,$($(my_2nd_arch_prefix)DEX2OAT_TARGET_ARCH),$(LOCAL_INSTALLED_MODULE))
+
+built_odex += $(my_built_odex)
+installed_odex += $(my_installed_odex)
+built_installed_odex += $(my_built_odex):$(my_installed_odex)
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index abef11f..c01be37 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -40,7 +40,6 @@
 
 # Define PRIVATE_ variables from global vars
 my_target_global_ld_dirs := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_LD_DIRS)
-my_target_fdo_lib := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_LIB)
 my_target_libgcc := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBGCC)
 my_target_libatomic := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBATOMIC)
 my_target_crtbegin_so_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_CRTBEGIN_SO_O)
@@ -58,7 +57,6 @@
 endif
 $(linked_module): PRIVATE_TARGET_GLOBAL_LD_DIRS := $(my_target_global_ld_dirs)
 $(linked_module): PRIVATE_TARGET_GLOBAL_LDFLAGS := $(my_target_global_ldflags)
-$(linked_module): PRIVATE_TARGET_FDO_LIB := $(my_target_fdo_lib)
 $(linked_module): PRIVATE_TARGET_LIBGCC := $(my_target_libgcc)
 $(linked_module): PRIVATE_TARGET_LIBATOMIC := $(my_target_libatomic)
 $(linked_module): PRIVATE_TARGET_CRTBEGIN_SO_O := $(my_target_crtbegin_so_o)
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 8e6029d..02078e0 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -78,7 +78,7 @@
 framework_res_package_export_deps :=
 # Please refer to package.mk
 ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-ifneq ($(filter-out current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current,$(LOCAL_SDK_RES_VERSION))),)
+ifneq ($(filter-out current system_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current,$(LOCAL_SDK_RES_VERSION))),)
 framework_res_package_export := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
 framework_res_package_export_deps := $(framework_res_package_export)
@@ -98,7 +98,7 @@
 $(R_file_stamp): PRIVATE_RESOURCE_PUBLICS_OUTPUT := $(intermediates.COMMON)/public_resources.xml
 $(R_file_stamp): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR)
 $(R_file_stamp): PRIVATE_AAPT_INCLUDES := $(framework_res_package_export)
-ifneq (,$(filter-out current, $(LOCAL_SDK_VERSION)))
+ifneq (,$(filter-out current system_current, $(LOCAL_SDK_VERSION)))
 $(R_file_stamp): PRIVATE_DEFAULT_APP_TARGET_SDK := $(LOCAL_SDK_VERSION)
 else
 $(R_file_stamp): PRIVATE_DEFAULT_APP_TARGET_SDK := $(DEFAULT_APP_TARGET_SDK)
diff --git a/core/tasks/apicheck.mk b/core/tasks/apicheck.mk
index 00b78b9..fc98f5b 100644
--- a/core/tasks/apicheck.mk
+++ b/core/tasks/apicheck.mk
@@ -42,6 +42,8 @@
     checkapi-last, \
     $(SRC_API_DIR)/$(last_released_sdk_version).txt, \
     $(INTERNAL_PLATFORM_API_FILE), \
+    frameworks/base/api/removed.txt, \
+    $(INTERNAL_PLATFORM_REMOVED_API_FILE), \
     -hide 2 -hide 3 -hide 4 -hide 5 -hide 6 -hide 24 -hide 25 -hide 26 -hide 27 \
     -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
     -error 16 -error 17 -error 18 , \
@@ -56,11 +58,13 @@
     checkapi-current, \
     frameworks/base/api/current.txt, \
     $(INTERNAL_PLATFORM_API_FILE), \
+    frameworks/base/api/removed.txt, \
+    $(INTERNAL_PLATFORM_REMOVED_API_FILE), \
     -error 2 -error 3 -error 4 -error 5 -error 6 \
     -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
     -error 16 -error 17 -error 18 -error 19 -error 20 -error 21 -error 23 -error 24 \
     -error 25 -error 26 -error 27, \
-    cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
+    sed -e 's/%UPDATE_API%/update-api/g' $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
     checkapi, \
     $(call doc-timestamp-for,api-stubs) \
     ))
@@ -69,5 +73,51 @@
 update-api: $(INTERNAL_PLATFORM_API_FILE) | $(ACP)
 	@echo Copying current.txt
 	$(hide) $(ACP) $(INTERNAL_PLATFORM_API_FILE) frameworks/base/api/current.txt
+	@echo Copying removed.txt
+	$(hide) $(ACP) $(INTERNAL_PLATFORM_REMOVED_API_FILE) frameworks/base/api/removed.txt
+
+
+#####################Check System API#####################
+.PHONY: checksystemapi
+
+# Check that the System API we're building hasn't broken the last-released
+# SDK version.
+$(eval $(call check-api, \
+    checksystemapi-last, \
+    $(SRC_SYSTEM_API_DIR)/$(last_released_sdk_version).txt, \
+    $(INTERNAL_PLATFORM_SYSTEM_API_FILE), \
+    frameworks/base/api/system-removed.txt, \
+    $(INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE), \
+    -hide 2 -hide 3 -hide 4 -hide 5 -hide 6 -hide 24 -hide 25 -hide 26 -hide 27 \
+    -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
+    -error 16 -error 17 -error 18 , \
+    cat $(BUILD_SYSTEM)/apicheck_msg_last.txt, \
+    checksystemapi, \
+    $(call doc-timestamp-for,system-api-stubs) \
+    ))
+
+# Check that the System API we're building hasn't changed from the not-yet-released
+# SDK version.
+$(eval $(call check-api, \
+    checksystemapi-current, \
+    frameworks/base/api/system-current.txt, \
+    $(INTERNAL_PLATFORM_SYSTEM_API_FILE), \
+    frameworks/base/api/system-removed.txt, \
+    $(INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE), \
+    -error 2 -error 3 -error 4 -error 5 -error 6 \
+    -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
+    -error 16 -error 17 -error 18 -error 19 -error 20 -error 21 -error 23 -error 24 \
+    -error 25 -error 26 -error 27, \
+    sed -e 's/%UPDATE_API%/update-system-api/g' $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
+    checksystemapi, \
+    $(call doc-timestamp-for,system-api-stubs) \
+    ))
+
+.PHONY: update-system-api
+update-system-api: $(INTERNAL_PLATFORM_SYSTEM_API_FILE) | $(ACP)
+	@echo Copying system-current.txt
+	$(hide) $(ACP) $(INTERNAL_PLATFORM_SYSTEM_API_FILE) frameworks/base/api/system-current.txt
+	@echo Copying system-removed.txt
+	$(hide) $(ACP) $(INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE) frameworks/base/api/system-removed.txt
 
 endif
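Note: with the removed-API files wired in, the developer-facing targets gain system-API counterparts. Typical usage from the top of the tree:

    $ make checkapi checksystemapi        # diff the built API against current.txt / system-current.txt
    $ make update-api update-system-api   # refresh current.txt, removed.txt and the system-* files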
diff --git a/core/tasks/boot_jars_package_check.mk b/core/tasks/boot_jars_package_check.mk
new file mode 100644
index 0000000..188c267
--- /dev/null
+++ b/core/tasks/boot_jars_package_check.mk
@@ -0,0 +1,46 @@
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# Rules to check if classes in the boot jars are from the whitelisted packages.
+#
+
+ifneq ($(SKIP_BOOT_JARS_CHECK),true)
+ifneq ($(TARGET_BUILD_PDK),true)
+ifdef PRODUCT_BOOT_JARS
+
+intermediates := $(call intermediates-dir-for, PACKAGING, boot-jars-package-check,,COMMON)
+stamp := $(intermediates)/stamp
+built_boot_jars := $(foreach j, $(PRODUCT_BOOT_JARS), \
+  $(call intermediates-dir-for, JAVA_LIBRARIES, $(j),,COMMON)/classes.jar)
+script := build/core/tasks/check_boot_jars/check_boot_jars.py
+whitelist_file := build/core/tasks/check_boot_jars/package_whitelist.txt
+
+$(stamp): PRIVATE_BOOT_JARS := $(built_boot_jars)
+$(stamp): PRIVATE_SCRIPT := $(script)
+$(stamp): PRIVATE_WHITELIST := $(whitelist_file)
+$(stamp) : $(built_boot_jars) $(script) $(whitelist_file)
+	@echo "Check package name for $(PRIVATE_BOOT_JARS)"
+	$(hide) $(PRIVATE_SCRIPT) $(PRIVATE_WHITELIST) $(PRIVATE_BOOT_JARS)
+	$(hide) mkdir -p $(dir $@) && touch $@
+
+.PHONY: check-boot-jars
+check-boot-jars : $(stamp)
+
+# Run check-boot-jars by default
+droidcore : check-boot-jars
+
+endif  # PRODUCT_BOOT_JARS
+endif  # TARGET_BUILD_PDK not true
+endif  # SKIP_BOOT_JARS_CHECK not true
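Note: because droidcore depends on check-boot-jars, every full build now runs the package check. It can also be driven directly, or skipped via the guard variable above:

    $ make check-boot-jars
    $ make droidcore SKIP_BOOT_JARS_CHECK=true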
diff --git a/core/tasks/check_boot_jars/check_boot_jars.py b/core/tasks/check_boot_jars/check_boot_jars.py
new file mode 100755
index 0000000..89d9ee8
--- /dev/null
+++ b/core/tasks/check_boot_jars/check_boot_jars.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+
+"""
+Check boot jars.
+
+Usage: check_boot_jars.py <package_whitelist_file> <jar1> <jar2> ...
+"""
+import logging
+import os.path
+import re
+import subprocess
+import sys
+
+
+# The compiled whitelist RE.
+whitelist_re = None
+
+
+def LoadWhitelist(filename):
+  """ Load and compile whitelist regular expressions from filename.
+  """
+  lines = []
+  with open(filename, 'r') as f:
+    for line in f:
+      line = line.strip()
+      if not line or line.startswith('#'):
+        continue
+      lines.append(line)
+  combined_re = r'^(%s)$' % '|'.join(lines)
+  global whitelist_re
+  try:
+    whitelist_re = re.compile(combined_re)
+  except re.error:
+    logging.exception(
+        'Cannot compile package whitelist regular expression: %r',
+        combined_re)
+    whitelist_re = None
+    return False
+  return True
+
+
+def CheckJar(jar):
+  """Check a jar file.
+  """
+  # Get the list of files inside the jar file.
+  p = subprocess.Popen(args='jar tf %s' % jar,
+      stdout=subprocess.PIPE, shell=True)
+  stdout, _ = p.communicate()
+  if p.returncode != 0:
+    return False
+  items = stdout.split()
+  for f in items:
+    if f.endswith('.class'):
+      package_name = os.path.dirname(f)
+      package_name = package_name.replace('/', '.')
+      # Skip classes without a package name
+      if package_name and not whitelist_re.match(package_name):
+        print >> sys.stderr, ('Error: %s: unknown package name of class file %s'
+                              % (jar, f))
+        return False
+  return True
+
+
+def main(argv):
+  if len(argv) < 2:
+    print __doc__
+    sys.exit(1)
+
+  if not LoadWhitelist(argv[0]):
+    sys.exit(1)
+
+  passed = True
+  for jar in argv[1:]:
+    if not CheckJar(jar):
+      passed = False
+  if not passed:
+    return 1
+
+  return 0
+
+
+if __name__ == '__main__':
+  sys.exit(main(sys.argv[1:]))
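Note: LoadWhitelist folds every whitelist line into one anchored alternation, ^(pkg1|pkg2|...)$, which is then matched against the dotted package of each .class entry. The script can also be run by hand; an illustrative invocation (the intermediates path is assumed):

    $ build/core/tasks/check_boot_jars/check_boot_jars.py \
          build/core/tasks/check_boot_jars/package_whitelist.txt \
          out/target/common/obj/JAVA_LIBRARIES/framework_intermediates/classes.jar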
diff --git a/core/tasks/check_boot_jars/package_whitelist.txt b/core/tasks/check_boot_jars/package_whitelist.txt
new file mode 100644
index 0000000..4d62615
--- /dev/null
+++ b/core/tasks/check_boot_jars/package_whitelist.txt
@@ -0,0 +1,214 @@
+# Boot jar package name whitelist.
+# Each line is interpreted as a regular expression.
+
+###################################################
+# core-libart.jar
+java\.awt\.font
+java\.beans
+java\.io
+java\.lang
+java\.lang\.annotation
+java\.lang\.ref
+java\.lang\.reflect
+java\.math
+java\.net
+java\.nio
+java\.nio\.channels
+java\.nio\.channels\.spi
+java\.nio\.charset
+java\.nio\.charset\.spi
+java\.security
+java\.security\.acl
+java\.security\.cert
+java\.security\.interfaces
+java\.security\.spec
+java\.sql
+java\.text
+java\.util
+java\.util\.concurrent
+java\.util\.concurrent\.atomic
+java\.util\.concurrent\.locks
+java\.util\.jar
+java\.util\.logging
+java\.util\.prefs
+java\.util\.regex
+java\.util\.zip
+javax\.crypto
+javax\.crypto\.interfaces
+javax\.crypto\.spec
+javax\.net
+javax\.net\.ssl
+javax\.security\.auth
+javax\.security\.auth\.callback
+javax\.security\.auth\.login
+javax\.security\.auth\.x500
+javax\.security\.cert
+javax\.sql
+javax\.xml
+javax\.xml\.datatype
+javax\.xml\.namespace
+javax\.xml\.parsers
+javax\.xml\.transform
+javax\.xml\.transform\.dom
+javax\.xml\.transform\.sax
+javax\.xml\.transform\.stream
+javax\.xml\.validation
+javax\.xml\.xpath
+sun\.misc
+org\.w3c\.dom
+org\.w3c\.dom\.ls
+org\.w3c\.dom\.traversal
+
+# TODO: Move these internal org.apache.harmony classes to libcore.*
+org\.apache\.harmony\.crypto\.internal
+org\.apache\.harmony\.dalvik
+org\.apache\.harmony\.dalvik\.ddmc
+org\.apache\.harmony\.luni\.internal\.util
+org\.apache\.harmony\.security
+org\.apache\.harmony\.security\.asn1
+org\.apache\.harmony\.security\.fortress
+org\.apache\.harmony\.security\.pkcs10
+org\.apache\.harmony\.security\.pkcs7
+org\.apache\.harmony\.security\.pkcs8
+org\.apache\.harmony\.security\.provider\.crypto
+org\.apache\.harmony\.security\.utils
+org\.apache\.harmony\.security\.x501
+org\.apache\.harmony\.security\.x509
+org\.apache\.harmony\.security\.x509\.tsp
+org\.apache\.harmony\.xml
+org\.apache\.harmony\.xml\.dom
+org\.apache\.harmony\.xml\.parsers
+
+org\.json
+org\.xmlpull\.v1
+org\.xmlpull\.v1\.sax2
+
+# TODO:  jarjar org.kxml2.io to com.android
+org\.kxml2\.io
+org\.xml
+org\.xml\.sax
+org\.xml\.sax\.ext
+org\.xml\.sax\.helpers
+
+dalvik\..*
+libcore\..*
+android\..*
+com\.android\..*
+
+
+###################################################
+# core-junit.jar
+junit\.extensions
+junit\.framework
+
+
+###################################################
+# ext.jar
+# TODO: jarjar javax.sip to com.android
+javax\.sip
+javax\.sip\.address
+javax\.sip\.header
+javax\.sip\.message
+
+# TODO: jarjar org.apache.commons to com.android
+org\.apache\.commons\.codec
+org\.apache\.commons\.codec\.binary
+org\.apache\.commons\.codec\.language
+org\.apache\.commons\.codec\.net
+org\.apache\.commons\.logging
+org\.apache\.commons\.logging\.impl
+org\.apache\.http
+org\.apache\.http\.auth
+org\.apache\.http\.auth\.params
+org\.apache\.http\.client
+org\.apache\.http\.client\.entity
+org\.apache\.http\.client\.methods
+org\.apache\.http\.client\.params
+org\.apache\.http\.client\.protocol
+org\.apache\.http\.client\.utils
+org\.apache\.http\.conn
+org\.apache\.http\.conn\.params
+org\.apache\.http\.conn\.routing
+org\.apache\.http\.conn\.scheme
+org\.apache\.http\.conn\.ssl
+org\.apache\.http\.conn\.util
+org\.apache\.http\.cookie
+org\.apache\.http\.cookie\.params
+org\.apache\.http\.entity
+org\.apache\.http\.impl
+org\.apache\.http\.impl\.auth
+org\.apache\.http\.impl\.client
+org\.apache\.http\.impl\.client
+org\.apache\.http\.impl\.conn
+org\.apache\.http\.impl\.conn\.tsccm
+org\.apache\.http\.impl\.cookie
+org\.apache\.http\.impl\.entity
+org\.apache\.http\.impl\.io
+org\.apache\.http\.impl\.io
+org\.apache\.http\.io
+org\.apache\.http\.message
+org\.apache\.http\.params
+org\.apache\.http\.protocol
+org\.apache\.http\.util
+
+# TODO: jarjar gov.nist to com.android
+gov\.nist\.core
+gov\.nist\.core\.net
+gov\.nist\.javax\.sip
+gov\.nist\.javax\.sip\.address
+gov\.nist\.javax\.sip\.clientauthutils
+gov\.nist\.javax\.sip\.header
+gov\.nist\.javax\.sip\.header\.extensions
+gov\.nist\.javax\.sip\.header\.ims
+gov\.nist\.javax\.sip\.message
+gov\.nist\.javax\.sip\.parser
+gov\.nist\.javax\.sip\.parser\.extensions
+gov\.nist\.javax\.sip\.parser\.ims
+gov\.nist\.javax\.sip\.stack
+
+org\.ccil\.cowan\.tagsoup
+org\.ccil\.cowan\.tagsoup\.jaxp
+
+###################################################
+# framework.jar
+javax\.microedition\.khronos\.opengles
+javax\.microedition\.khronos\.egl
+
+android
+
+
+###################################################
+# telephony-common.jar
+com\.google\..*
+
+
+###################################################
+# apache-xml.jar
+org\.apache\.xml\.res
+org\.apache\.xml\.utils
+org\.apache\.xml\.utils\.res
+org\.apache\.xml\.dtm
+org\.apache\.xml\.dtm\.ref
+org\.apache\.xml\.dtm\.ref\.dom2dtm
+org\.apache\.xml\.dtm\.ref\.sax2dtm
+org\.apache\.xml\.serializer
+org\.apache\.xml\.serializer\.utils
+org\.apache\.xml\.serializer\.dom3
+org\.apache\.xpath
+org\.apache\.xpath\.operations
+org\.apache\.xpath\.domapi
+org\.apache\.xpath\.functions
+org\.apache\.xpath\.res
+org\.apache\.xpath\.axes
+org\.apache\.xpath\.objects
+org\.apache\.xpath\.patterns
+org\.apache\.xpath\.jaxp
+org\.apache\.xpath\.compiler
+org\.apache\.xalan
+org\.apache\.xalan\.res
+org\.apache\.xalan\.templates
+org\.apache\.xalan\.serialize
+org\.apache\.xalan\.extensions
+org\.apache\.xalan\.processor
+org\.apache\.xalan\.transformer
+org\.apache\.xalan\.xslt
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index c521fa3..2ce5dbc 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -17,8 +17,7 @@
 
 cts_name := android-cts
 
-DDMLIB_JAR := $(HOST_OUT_JAVA_LIBRARIES)/ddmlib-prebuilt.jar
-junit_host_jar := $(HOST_OUT_JAVA_LIBRARIES)/junit.jar
+JUNIT_HOST_JAR := $(HOST_OUT_JAVA_LIBRARIES)/junit.jar
 HOSTTESTLIB_JAR := $(HOST_OUT_JAVA_LIBRARIES)/hosttestlib.jar
 TF_JAR := $(HOST_OUT_JAVA_LIBRARIES)/tradefed-prebuilt.jar
 CTS_TF_JAR := $(HOST_OUT_JAVA_LIBRARIES)/cts-tradefed.jar
@@ -48,6 +47,7 @@
 	android.core.tests.libcore.package.harmony_java_text \
 	android.core.tests.libcore.package.harmony_java_util \
 	android.core.tests.libcore.package.harmony_javax_security \
+	android.core.tests.libcore.package.okhttp \
 	android.core.tests.runner
 
 # The list of test packages that apache-harmony-tests (external/apache-harmony/Android.mk)
@@ -58,19 +58,33 @@
 	android.core.tests.libcore.package.harmony_prefs \
 	android.core.tests.libcore.package.harmony_sql
 
+
+CTS_TEST_JAR_LIST := \
+        cts-junit \
+        CtsJdwp
+
 # Depend on the full package paths rather than the phony targets to avoid
 # rebuilding the packages every time.
 CTS_CORE_CASES := $(foreach pkg,$(CTS_CORE_CASE_LIST),$(call intermediates-dir-for,APPS,$(pkg))/package.apk)
+CTS_TEST_JAR_FILES := $(foreach c,$(CTS_TEST_JAR_LIST),$(call intermediates-dir-for,JAVA_LIBRARIES,$(c))/javalib.jar)
 
 -include cts/CtsTestCaseList.mk
 CTS_CASE_LIST := $(CTS_CORE_CASE_LIST) $(CTS_TEST_CASE_LIST)
 
+# A module may have multiple installed files (e.g. split apks)
+CTS_CASE_LIST_APKS :=
+CTS_CASE_LIST_APKS_DIR := $(cts_dir)/$(cts_name)/repository/testcases/
+$(foreach m, $(CTS_CASE_LIST),\
+  $(foreach fp, $(ALL_MODULES.$(m).BUILT_INSTALLED),\
+    $(eval pair := $(subst :,$(space),$(fp)))\
+    $(eval built := $(word 1,$(pair)))\
+    $(eval installed := $(CTS_CASE_LIST_APKS_DIR)/$(notdir $(word 2,$(pair))))\
+    $(eval $(call copy-one-file, $(built), $(installed)))\
+    $(eval CTS_CASE_LIST_APKS += $(installed))))
+
 DEFAULT_TEST_PLAN := $(cts_dir)/$(cts_name)/resource/plans
-CTS_TEST_CASE_LIST_FILES := $(foreach c, $(CTS_TEST_CASE_LIST), $(call intermediates-dir-for,APPS,$(c))/package.apk)
-$(cts_dir)/all_cts_files_stamp: PRIVATE_JUNIT_HOST_JAR := $(junit_host_jar)
-$(cts_dir)/all_cts_files_stamp: $(CTS_CORE_CASES) $(CTS_TEST_CASES) $(CTS_TEST_CASE_LIST_FILES) $(junit_host_jar) $(HOSTTESTLIB_JAR) $(CTS_HOST_LIBRARY_JARS) $(TF_JAR) $(VMTESTSTF_JAR) $(CTS_TF_JAR) $(CTS_TF_EXEC_PATH) $(CTS_TF_README_PATH) $(ACP)
+$(cts_dir)/all_cts_files_stamp: $(CTS_CORE_CASES) $(CTS_TEST_CASES) $(CTS_CASE_LIST_APKS) $(JUNIT_HOST_JAR) $(HOSTTESTLIB_JAR) $(CTS_HOST_LIBRARY_JARS) $(TF_JAR) $(VMTESTSTF_JAR) $(CTS_TF_JAR) $(CTS_TF_EXEC_PATH) $(CTS_TF_README_PATH) $(ACP) $(CTS_TEST_JAR_FILES)
 # Make necessary directory for CTS
-	$(hide) rm -rf $(PRIVATE_CTS_DIR)
 	$(hide) mkdir -p $(TMP_DIR)
 	$(hide) mkdir -p $(PRIVATE_DIR)/docs
 	$(hide) mkdir -p $(PRIVATE_DIR)/tools
@@ -78,9 +92,9 @@
 	$(hide) mkdir -p $(PRIVATE_DIR)/repository/plans
 # Copy executable and JARs to CTS directory
 	$(hide) $(ACP) -fp $(VMTESTSTF_JAR) $(PRIVATE_DIR)/repository/testcases
-	$(hide) $(ACP) -fp $(DDMLIB_JAR) $(PRIVATE_JUNIT_HOST_JAR) $(HOSTTESTLIB_JAR) $(CTS_HOST_LIBRARY_JARS) $(TF_JAR) $(CTS_TF_JAR) $(CTS_TF_EXEC_PATH) $(CTS_TF_README_PATH) $(PRIVATE_DIR)/tools
+	$(hide) $(ACP) -fp $(HOSTTESTLIB_JAR) $(CTS_HOST_LIBRARY_JARS) $(TF_JAR) $(CTS_TF_JAR) $(CTS_TF_EXEC_PATH) $(CTS_TF_README_PATH) $(PRIVATE_DIR)/tools
 # Change mode of the executables
-	$(foreach apk,$(CTS_CASE_LIST),$(call copy-testcase-apk,$(apk)))
+	$(foreach jar,$(CTS_TEST_JAR_LIST),$(call copy-testcase-jar,$(jar)))
 	$(foreach testcase,$(CTS_TEST_CASES),$(call copy-testcase,$(testcase)))
 	$(hide) touch $@
 
@@ -90,29 +104,32 @@
 # $2 : The AndroidManifest.xml corresponding to the test package
 # $3 : The jar file name on PRIVATE_CLASSPATH containing junit tests to search for
 # $4 : The package prefix of classes to include, possible empty
-# $5 : The directory containing vogar expectations files
-# $6 : The Android.mk corresponding to the test package (required for host-side tests only)
+# $5 : The architecture of the current build
+# $6 : The directory containing vogar expectations files
+# $7 : The Android.mk corresponding to the test package (required for host-side tests only)
 define generate-core-test-description
 @echo "Generate core-test description ("$(notdir $(1))")"
 $(hide) java -Xmx256M \
-	-Xbootclasspath/a:$(PRIVATE_CLASSPATH) \
-	-classpath $(PRIVATE_CLASSPATH):$(HOST_OUT_JAVA_LIBRARIES)/descGen.jar:$(HOST_OUT_JAVA_LIBRARIES)/junit.jar:$(HOST_JDK_TOOLS_JAR) \
-	$(PRIVATE_PARAMS) CollectAllTests $(1) $(2) $(3) "$(4)" $(5) $(6)
+	-Xbootclasspath/a:$(PRIVATE_CLASSPATH):$(JUNIT_HOST_JAR) \
+	-classpath $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar:$(HOST_JDK_TOOLS_JAR) \
+	$(PRIVATE_PARAMS) CollectAllTests $(1) $(2) $(3) "$(4)" $(5) $(6) $(7)
 endef
 
 CORE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-libart,,COMMON)
 CONSCRYPT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,conscrypt,,COMMON)
 BOUNCYCASTLE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,bouncycastle,,COMMON)
 APACHEXML_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,apache-xml,,COMMON)
-OKHTTP_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp,,COMMON)
-APACHEHARMONY_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,apache-harmony-tests,,COMMON)
+OKHTTP_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp-nojarjar,,COMMON)
+OKHTTPTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp-tests-nojarjar,,COMMON)
+OKHTTP_REPACKAGED_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp,,COMMON)
+APACHEHARMONYTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,apache-harmony-tests,,COMMON)
 SQLITEJDBC_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,sqlite-jdbc,,COMMON)
 JUNIT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-junit,,COMMON)
 CORETESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-tests,,COMMON)
 JSR166TESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,jsr166-tests,,COMMON)
 CONSCRYPTTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,conscrypt-tests,,COMMON)
 
-GEN_CLASSPATH := $(CORE_INTERMEDIATES)/classes.jar:$(CONSCRYPT_INTERMEDIATES)/classes.jar:$(BOUNCYCASTLE_INTERMEDIATES)/classes.jar:$(APACHEXML_INTERMEDIATES)/classes.jar:$(APACHEHARMONY_INTERMEDIATES)/classes.jar:$(OKHTTP_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(SQLITEJDBC_INTERMEDIATES)/javalib.jar:$(CORETESTS_INTERMEDIATES)/javalib.jar:$(JSR166TESTS_INTERMEDIATES)/javalib.jar:$(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar
+GEN_CLASSPATH := $(CORE_INTERMEDIATES)/classes.jar:$(CONSCRYPT_INTERMEDIATES)/classes.jar:$(BOUNCYCASTLE_INTERMEDIATES)/classes.jar:$(APACHEXML_INTERMEDIATES)/classes.jar:$(APACHEHARMONYTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_INTERMEDIATES)/classes.jar:$(OKHTTPTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_REPACKAGED_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(SQLITEJDBC_INTERMEDIATES)/javalib.jar:$(CORETESTS_INTERMEDIATES)/javalib.jar:$(JSR166TESTS_INTERMEDIATES)/javalib.jar:$(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar
 
 CTS_CORE_XMLS := \
 	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik.xml \
@@ -136,6 +153,7 @@
 	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging.xml \
 	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs.xml \
 	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp.xml \
 
 $(CTS_CORE_XMLS): PRIVATE_CLASSPATH:=$(GEN_CLASSPATH)
 # Why does this depend on javalib.jar instead of classes.jar?  Because
@@ -143,155 +161,161 @@
 # build system requires that dependencies use javalib.jar.  If
 # javalib.jar is up-to-date, then classes.jar is as well.  Depending
 # on classes.jar will build the files incorrectly.
-CTS_CORE_XMLS_DEPS := $(CTS_CORE_CASES) $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(HOST_OUT_JAVA_LIBRARIES)/junit.jar $(CORE_INTERMEDIATES)/javalib.jar $(BOUNCYCASTLE_INTERMEDIATES)/javalib.jar $(APACHEXML_INTERMEDIATES)/javalib.jar $(APACHEHARMONY_INTERMEDIATES)/javalib.jar $(OKHTTP_INTERMEDIATES)/javalib.jar $(SQLITEJDBC_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(CORETESTS_INTERMEDIATES)/javalib.jar $(JSR166TESTS_INTERMEDIATES)/javalib.jar $(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar build/core/tasks/cts.mk | $(ACP)
+CTS_CORE_XMLS_DEPS := $(CTS_CORE_CASES) $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(JUNIT_HOST_JAR) $(CORE_INTERMEDIATES)/javalib.jar $(BOUNCYCASTLE_INTERMEDIATES)/javalib.jar $(APACHEXML_INTERMEDIATES)/javalib.jar $(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar $(OKHTTP_INTERMEDIATES)/javalib.jar $(OKHTTPTESTS_INTERMEDIATES)/javalib.jar $(OKHTTP_REPACKAGED_INTERMEDIATES)/javalib.jar $(SQLITEJDBC_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(CORETESTS_INTERMEDIATES)/javalib.jar $(JSR166TESTS_INTERMEDIATES)/javalib.jar $(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar build/core/tasks/cts.mk | $(ACP)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik,\
 		cts/tests/core/libcore/dalvik/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,dalvik,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.com.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.com,\
 		cts/tests/core/libcore/com/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,com,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.conscrypt.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.conscrypt,\
 		cts/tests/core/libcore/conscrypt/AndroidManifest.xml,\
 		$(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar,,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.sun.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.sun,\
 		cts/tests/core/libcore/sun/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,sun,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tests.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tests,\
 		cts/tests/core/libcore/tests/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,tests,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.org.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.org,\
 		cts/tests/core/libcore/org/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,org,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.libcore.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.libcore,\
 		cts/tests/core/libcore/libcore/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,libcore,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.jsr166.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.jsr166,\
 		cts/tests/core/libcore/jsr166/AndroidManifest.xml,\
 		$(JSR166TESTS_INTERMEDIATES)/javalib.jar,jsr166,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_annotation.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_annotation,\
 		cts/tests/core/libcore/harmony_annotation/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.annotation.tests,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_io.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_io,\
 		cts/tests/core/libcore/harmony_java_io/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.io,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_lang.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_lang,\
 		cts/tests/core/libcore/harmony_java_lang/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.lang,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_math.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_math,\
 		cts/tests/core/libcore/harmony_java_math/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.math,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_net.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_net,\
 		cts/tests/core/libcore/harmony_java_net/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.net,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_nio.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_nio,\
 		cts/tests/core/libcore/harmony_java_nio/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.nio,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_text.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_text,\
 		cts/tests/core/libcore/harmony_java_text/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.text,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_util.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_util,\
 		cts/tests/core/libcore/harmony_java_util/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.util,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_javax_security.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_javax_security,\
 		cts/tests/core/libcore/harmony_javax_security/AndroidManifest.xml,\
 		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.javax.security,\
-		libcore/expectations)
+		$(TARGET_ARCH),libcore/expectations)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_beans.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_beans,\
 		cts/tests/core/libcore/harmony_beans/AndroidManifest.xml,\
-		$(APACHEHARMONY_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.beans,\
-		libcore/expectations external/apache-harmony/Android.mk)
+		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.beans,\
+		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging,\
 		cts/tests/core/libcore/harmony_logging/AndroidManifest.xml,\
-		$(APACHEHARMONY_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.logging,\
-		libcore/expectations external/apache-harmony/Android.mk)
-
+		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.logging,\
+		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs,\
 		cts/tests/core/libcore/harmony_prefs/AndroidManifest.xml,\
-		$(APACHEHARMONY_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.prefs,\
-		libcore/expectations external/apache-harmony/Android.mk)
+		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.prefs,\
+		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
 
 $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql.xml: $(CTS_CORE_XMLS_DEPS)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql,\
 		cts/tests/core/libcore/harmony_sql/AndroidManifest.xml,\
-		$(APACHEHARMONY_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.sql,\
-		libcore/expectations external/apache-harmony/Android.mk)
+		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.sql,\
+		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp,\
+		cts/tests/core/libcore/okhttp/AndroidManifest.xml,\
+		$(OKHTTPTESTS_INTERMEDIATES)/javalib.jar,,\
+		$(TARGET_ARCH),libcore/expectations)
 
 # ----- Generate the test descriptions for the vm-tests-tf -----
 #
@@ -301,15 +325,16 @@
 CORE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-libart,,COMMON)
 JUNIT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-junit,,COMMON)
 
-GEN_CLASSPATH := $(CORE_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(VMTESTSTF_JAR):$(DDMLIB_JAR):$(TF_JAR)
+GEN_CLASSPATH := $(CORE_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(VMTESTSTF_JAR):$(TF_JAR)
 
 $(CORE_VM_TEST_TF_DESC): PRIVATE_CLASSPATH:=$(GEN_CLASSPATH)
 # Please see big comment above on why this line depends on javalib.jar instead of classes.jar
-$(CORE_VM_TEST_TF_DESC): $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(HOST_OUT_JAVA_LIBRARIES)/junit.jar $(CORE_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(VMTESTSTF_JAR) $(DDMLIB_JAR) | $(ACP)
+$(CORE_VM_TEST_TF_DESC): $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(JUNIT_HOST_JAR) $(CORE_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(VMTESTSTF_JAR) | $(ACP)
 	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
 	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.vm-tests-tf,\
 		cts/tests/vm-tests-tf/AndroidManifest.xml,\
 		$(VMTESTSTF_JAR),"",\
+		$(TARGET_ARCH),\
 		libcore/expectations,\
 		cts/tools/vm-tests-tf/Android.mk)
 
@@ -339,15 +364,16 @@
 cts: $(INTERNAL_CTS_TARGET) adb
 $(call dist-for-goals,cts,$(INTERNAL_CTS_TARGET))
 
-define copy-testcase-apk
-
-$(hide) $(ACP) -fp $(call intermediates-dir-for,APPS,$(1))/package.apk \
-	$(PRIVATE_DIR)/repository/testcases/$(1).apk
-
-endef
 
 define copy-testcase
 
 $(hide) $(ACP) -fp $(1) $(PRIVATE_DIR)/repository/testcases/$(notdir $1)
 
 endef
+
+define copy-testcase-jar
+
+$(hide) $(ACP) -fp $(call intermediates-dir-for,JAVA_LIBRARIES,$(1))/javalib.jar \
+	$(PRIVATE_DIR)/repository/testcases/$(1).jar
+
+endef
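Note: each ALL_MODULES.<m>.BUILT_INSTALLED entry consumed by the new apk-copy loop is a built:installed pair. For a hypothetical module Foo the pair

    out/.../obj/APPS/Foo_intermediates/package.apk:out/.../system/app/Foo.apk

is split on the colon, and copy-one-file then stages the built apk as Foo.apk under $(cts_dir)/$(cts_name)/repository/testcases/, so all installed files of a module are carried along.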
diff --git a/core/tasks/oem_image.mk b/core/tasks/oem_image.mk
new file mode 100644
index 0000000..26b9aba
--- /dev/null
+++ b/core/tasks/oem_image.mk
@@ -0,0 +1,46 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# We build oem.img only if it's asked for.
+ifneq ($(filter $(MAKECMDGOALS),oem_image),)
+ifndef BOARD_OEMIMAGE_PARTITION_SIZE
+$(error BOARD_OEMIMAGE_PARTITION_SIZE is not set.)
+endif
+
+INTERNAL_OEMIMAGE_FILES := \
+    $(filter $(TARGET_OUT_OEM)/%,$(ALL_DEFAULT_INSTALLED_MODULES))
+
+oemimage_intermediates := \
+    $(call intermediates-dir-for,PACKAGING,oem)
+BUILT_OEMIMAGE_TARGET := $(PRODUCT_OUT)/oem.img
+# We just build this directly to the install location.
+INSTALLED_OEMIMAGE_TARGET := $(BUILT_OEMIMAGE_TARGET)
+
+$(INSTALLED_OEMIMAGE_TARGET) : $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_OEMIMAGE_FILES)
+	$(call pretty,"Target oem fs image: $@")
+	@mkdir -p $(TARGET_OUT_OEM)
+	@mkdir -p $(oemimage_intermediates) && rm -rf $(oemimage_intermediates)/oem_image_info.txt
+	$(call generate-userimage-prop-dictionary, $(oemimage_intermediates)/oem_image_info.txt, skip_fsck=true)
+	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+	  ./build/tools/releasetools/build_image.py \
+	  $(TARGET_OUT_OEM) $(oemimage_intermediates)/oem_image_info.txt $@
+	$(hide) $(call assert-max-image-size,$@,$(BOARD_OEMIMAGE_PARTITION_SIZE))
+
+.PHONY: oem_image
+oem_image : $(INSTALLED_OEMIMAGE_TARGET)
+$(call dist-for-goals, oem_image, $(INSTALLED_OEMIMAGE_TARGET))
+
+endif  # oem_image in $(MAKECMDGOALS)
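Note: oem.img is only assembled when oem_image is named on the command line, and the board must declare the partition size. A minimal setup (size is illustrative):

    # In the device's BoardConfig.mk (value assumed)
    BOARD_OEMIMAGE_PARTITION_SIZE := 67108864

    $ make oem_image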
diff --git a/core/tasks/sdk-addon.mk b/core/tasks/sdk-addon.mk
index 5a24a00..5ac9b7d 100644
--- a/core/tasks/sdk-addon.mk
+++ b/core/tasks/sdk-addon.mk
@@ -18,11 +18,12 @@
 addon_name := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_NAME))
 ifneq ($(addon_name),)
 
-addon_dir_leaf := $(addon_name)-$(FILE_NAME_TAG)-$(INTERNAL_SDK_HOST_OS_NAME)
-
-intermediates := $(HOST_OUT_INTERMEDIATES)/SDK_ADDON/$(addon_name)_intermediates
-full_target := $(HOST_OUT_SDK_ADDON)/$(addon_dir_leaf).zip
-staging := $(intermediates)/$(addon_dir_leaf)
+addon_dir_leaf  := $(addon_name)-$(FILE_NAME_TAG)-$(INTERNAL_SDK_HOST_OS_NAME)
+addon_dir_img   := $(addon_dir_leaf)-img
+intermediates   := $(HOST_OUT_INTERMEDIATES)/SDK_ADDON/$(addon_name)_intermediates
+full_target     := $(HOST_OUT_SDK_ADDON)/$(addon_dir_leaf).zip
+full_target_img := $(HOST_OUT_SDK_ADDON)/$(addon_dir_img).zip
+staging         := $(intermediates)
 
 sdk_addon_deps :=
 files_to_copy :=
@@ -46,29 +47,54 @@
   $(eval _src := $(call stub-addon-jar-file,$(_src))) \
   $(if $(_src),,$(eval $(error Unknown or unlinkable module: $(call word-colon,1,$(cf)). Requested by $(INTERNAL_PRODUCT)))) \
   $(eval _dest := $(call word-colon,2,$(cf))) \
-  $(eval files_to_copy += $(_src):$(_dest)) \
+  $(eval files_to_copy += $(addon_dir_leaf):$(_src):$(_dest)) \
  )
 endif
 
 # Files that are copied directly into the sdk-addon
-files_to_copy += $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_COPY_FILES)
+ifneq ($(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_COPY_FILES)),)
+$(foreach cf,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_COPY_FILES), \
+  $(eval _src  := $(call word-colon,1,$(cf))) \
+  $(eval _dest := $(call word-colon,2,$(cf))) \
+  $(if $(findstring images/,$(_dest)), $(eval _root := $(addon_dir_img)), $(eval _root := $(addon_dir_leaf))) \
+  $(eval files_to_copy += $(_root):$(_src):$(_dest)) \
+ )
+endif
 
-# All SDK add-ons have these files
+# Files copied into the system-image directory
 files_to_copy += \
-        $(BUILT_SYSTEMIMAGE):images/$(TARGET_CPU_ABI)/system.img \
-        $(BUILT_USERDATAIMAGE_TARGET):images/$(TARGET_CPU_ABI)/userdata.img \
-        $(BUILT_RAMDISK_TARGET):images/$(TARGET_CPU_ABI)/ramdisk.img \
-        $(PRODUCT_OUT)/system/build.prop:images/$(TARGET_CPU_ABI)/build.prop \
-        $(target_notice_file_txt):images/$(TARGET_CPU_ABI)/NOTICE.txt
+	$(addon_dir_img):$(BUILT_SYSTEMIMAGE):images/$(TARGET_CPU_ABI)/system.img \
+	$(addon_dir_img):$(BUILT_USERDATAIMAGE_TARGET):images/$(TARGET_CPU_ABI)/userdata.img \
+	$(addon_dir_img):$(BUILT_RAMDISK_TARGET):images/$(TARGET_CPU_ABI)/ramdisk.img \
+	$(addon_dir_img):$(PRODUCT_OUT)/system/build.prop:images/$(TARGET_CPU_ABI)/build.prop \
+	$(addon_dir_img):$(target_notice_file_txt):images/$(TARGET_CPU_ABI)/NOTICE.txt \
+	$(addon_dir_img):$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP):images/source.properties
 
 # Generate rules to copy the requested files
 $(foreach cf,$(files_to_copy), \
-  $(eval _src := $(call word-colon,1,$(cf))) \
-  $(eval _dest := $(call append-path,$(staging),$(call word-colon,2,$(cf)))) \
+  $(eval _root := $(call word-colon,1,$(cf))) \
+  $(eval _src  := $(call word-colon,2,$(cf))) \
+  $(eval _dest := $(call append-path,$(call append-path,$(staging),$(_root)),$(call word-colon,3,$(cf)))) \
   $(eval $(call copy-one-file,$(_src),$(_dest))) \
   $(eval sdk_addon_deps += $(_dest)) \
  )
 
+# The system-image source.properties is a template that we directly expand in-place
+addon_img_source_prop := $(call append-path,$(staging),$(addon_dir_img))/images/$(TARGET_CPU_ABI)/source.properties
+sdk_addon_deps += $(addon_img_source_prop)
+
+$(addon_img_source_prop): $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SDK_ADDON_SYS_IMG_SOURCE_PROP)
+	@echo Generate $@
+	$(hide) mkdir -p $(dir $@)
+	$(hide) sed \
+		-e 's/$${PLATFORM_VERSION}/$(PLATFORM_VERSION)/' \
+		-e 's/$${PLATFORM_SDK_VERSION}/$(PLATFORM_SDK_VERSION)/' \
+		-e 's/$${PLATFORM_VERSION_CODENAME}/$(subst REL,,$(PLATFORM_VERSION_CODENAME))/' \
+		-e 's/$${TARGET_ARCH}/$(TARGET_ARCH)/' \
+		-e 's/$${TARGET_CPU_ABI}/$(TARGET_CPU_ABI)/' \
+		$< > $@ && sed -i -e '/^AndroidVersion.CodeName=\s*$$/d' $@
+
+
 # We don't know about all of the docs files, so depend on the timestamps for
 # them, and record the directories, and the packaging rule will just copy the
 # whole thing.
@@ -76,7 +102,7 @@
 sdk_addon_deps += $(foreach dm, $(doc_modules), $(call doc-timestamp-for, $(dm)))
 $(full_target): PRIVATE_DOCS_DIRS := $(addprefix $(OUT_DOCS)/, $(doc_modules))
 
-$(full_target): PRIVATE_STAGING_DIR := $(staging)
+$(full_target): PRIVATE_STAGING_DIR := $(call append-path,$(staging),$(addon_dir_leaf))
 
 $(full_target): $(sdk_addon_deps) | $(ACP)
 	@echo Packaging SDK Addon: $@
@@ -85,16 +111,24 @@
 	    $(ACP) -r $$d $(PRIVATE_STAGING_DIR)/docs ;\
 	  done
 	$(hide) mkdir -p $(dir $@)
-	$(hide) ( F=$$(pwd)/$@ ; cd $(PRIVATE_STAGING_DIR)/.. && zip -rq $$F * )
+	$(hide) ( F=$$(pwd)/$@ ; cd $(PRIVATE_STAGING_DIR)/.. && zip -rq $$F $(notdir $(PRIVATE_STAGING_DIR)) )
+
+$(full_target_img): PRIVATE_STAGING_DIR := $(call append-path,$(staging),$(addon_dir_img))/images/$(TARGET_CPU_ABI)
+$(full_target_img): $(full_target) $(addon_img_source_prop)
+	@echo Packaging SDK Addon System-Image: $@
+	$(hide) mkdir -p $(dir $@)
+	$(hide) ( F=$$(pwd)/$@ ; cd $(PRIVATE_STAGING_DIR)/.. && zip -rq $$F $(notdir $(PRIVATE_STAGING_DIR)) )
+
 
 .PHONY: sdk_addon
-sdk_addon: $(full_target)
+sdk_addon: $(full_target) $(full_target_img)
 
 ifneq ($(sdk_repo_goal),)
 # If we're building the sdk_repo, keep the name of the addon zip
 # around so that development/build/tools/sdk_repo.mk can dist it
 # at the appropriate location.
-ADDON_SDK_ZIP := $(full_target)
+ADDON_SDK_ZIP        := $(full_target)
+ADDON_SDK_IMG_ZIP    := $(full_target_img)
 else
 # When not building an sdk_repo, just dist the addon zip file
 # as-is.
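Note: copy entries now carry a staging-root prefix (root:src:dest), and any PRODUCT_SDK_ADDON_COPY_FILES destination under images/ is re-rooted into the separate -img zip. A hypothetical add-on definition:

    PRODUCT_SDK_ADDON_COPY_FILES := \
        device/vendor/addon/manifest.ini:manifest.ini \
        device/vendor/addon/kernel-qemu:images/$(TARGET_CPU_ABI)/kernel

The first entry is staged into the main add-on zip, the second into the system-image zip.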
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index f7e04ed..bd9cf57 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -37,16 +37,26 @@
       $(eval my_copy_pairs += $(bui):$(my_staging_dir)/$(my_copy_dest)))\
   ))
 
+define copy-tests-in-batch
+$(hide) $(foreach p, $(1),\
+  $(eval pair := $(subst :,$(space),$(p)))\
+  mkdir -p $(dir $(word 2,$(pair)));\
+  cp -rf $(word 1,$(pair)) $(word 2,$(pair));)
+endef
+
 my_package_zip := $(my_staging_dir)/$(my_package_name).zip
 $(my_package_zip): PRIVATE_COPY_PAIRS := $(my_copy_pairs)
 $(my_package_zip): PRIVATE_PICKUP_FILES := $(my_pickup_files)
 $(my_package_zip) : $(my_built_modules)
 	@echo "Package $@"
 	@rm -rf $(dir $@) && mkdir -p $(dir $@)
-	$(hide) $(foreach p, $(PRIVATE_COPY_PAIRS), \
-	  $(eval pair := $(subst :,$(space),$(p)))\
-	  mkdir -p $(dir $(word 2,$(pair))); \
-	  cp -rf $(word 1,$(pair)) $(word 2,$(pair));)
-	$(hide) $(foreach f, $(PRIVATE_PICKUP_FILES), \
+	$(call copy-tests-in-batch,$(wordlist 1,200,$(PRIVATE_COPY_PAIRS)))
+	$(call copy-tests-in-batch,$(wordlist 201,400,$(PRIVATE_COPY_PAIRS)))
+	$(call copy-tests-in-batch,$(wordlist 401,600,$(PRIVATE_COPY_PAIRS)))
+	$(call copy-tests-in-batch,$(wordlist 601,800,$(PRIVATE_COPY_PAIRS)))
+	$(call copy-tests-in-batch,$(wordlist 801,1000,$(PRIVATE_COPY_PAIRS)))
+	$(call copy-tests-in-batch,$(wordlist 1001,1200,$(PRIVATE_COPY_PAIRS)))
+	$(call copy-tests-in-batch,$(wordlist 1201,9999,$(PRIVATE_COPY_PAIRS)))
+	$(hide) $(foreach f, $(PRIVATE_PICKUP_FILES),\
 	  cp -rf $(f) $(dir $@);)
 	$(hide) cd $(dir $@) && zip -rq $(notdir $@) *
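Note: the copy loop is expanded into a single shell command, and with thousands of copy pairs that command can exceed the argument-length limit, hence the 200-pair batches above. For a batch holding the pairs a:x/a and b:y/b, copy-tests-in-batch expands to roughly:

    mkdir -p x/; cp -rf a x/a; mkdir -p y/; cp -rf b y/b;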
diff --git a/core/tasks/vendor_module_check.mk b/core/tasks/vendor_module_check.mk
index 9c19422..d8e8ec7 100644
--- a/core/tasks/vendor_module_check.mk
+++ b/core/tasks/vendor_module_check.mk
@@ -18,6 +18,7 @@
 _vendor_owner_whitelist := \
         asus \
         audience \
+        atmel \
         broadcom \
         csr \
         elan \
@@ -27,11 +28,17 @@
         invensense \
         intel \
         lge \
+        moto \
+        mtk \
         nvidia \
         nxp \
+        nxpsw \
         qcom \
+        qti \
         samsung \
         samsung_arm \
+        sony \
+        synaptics \
         ti \
         trusted_logic \
         widevine
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index d5b1e62..8cb8d26 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -41,7 +41,7 @@
   # which is the version that we reveal to the end user.
   # Update this value when the platform version changes (rather
   # than overriding it somewhere else).  Can be an arbitrary string.
-  PLATFORM_VERSION := 4.4W.2
+  PLATFORM_VERSION := 5.0
 endif
 
 ifeq "" "$(PLATFORM_SDK_VERSION)"
@@ -53,13 +53,18 @@
   # intermediate builds).  During development, this number remains at the
   # SDK version the branch is based on and PLATFORM_VERSION_CODENAME holds
   # the code-name of the new development work.
-  PLATFORM_SDK_VERSION := 20
+  PLATFORM_SDK_VERSION := 21
 endif
 
 ifeq "" "$(PLATFORM_VERSION_CODENAME)"
   # This is the current development code-name, if the build is not a final
   # release build.  If this is a final release build, it is simply "REL".
   PLATFORM_VERSION_CODENAME := REL
+
+  # These are all of the development codenames that are active.  Should be either
+  # the same as PLATFORM_VERSION_CODENAME or a comma-separated list of additional
+  # codenames after PLATFORM_VERSION_CODENAME.
+  PLATFORM_VERSION_ALL_CODENAMES := $(PLATFORM_VERSION_CODENAME)
 endif
 
 ifeq "" "$(DEFAULT_APP_TARGET_SDK)"
diff --git a/envsetup.sh b/envsetup.sh
index af86dd0..a9bd707 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -2,7 +2,7 @@
 cat <<EOF
 Invoke ". build/envsetup.sh" from your shell to add the following functions to your environment:
 - lunch:   lunch <product_name>-<build_variant>
-- tapas:   tapas [<App1> <App2> ...] [arm|x86|mips|armv5] [eng|userdebug|user]
+- tapas:   tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
 - croot:   Changes directory to the top of the tree.
 - m:       Makes from the top of the tree.
 - mm:      Builds all of the modules in the current directory, but not their dependencies.
@@ -14,6 +14,7 @@
 - ggrep:   Greps on all local Gradle files.
 - jgrep:   Greps on all local Java files.
 - resgrep: Greps on all local res/*.xml files.
+- sgrep:   Greps on all local source files.
 - godir:   Go to the directory containing a file.
 
 Look at the source to view more functions. The complete list is:
@@ -21,7 +22,7 @@
     T=$(gettop)
     local A
     A=""
-    for i in `cat $T/build/envsetup.sh | sed -n "/^function /s/function \([a-z_]*\).*/\1/p" | sort`; do
+    for i in `cat $T/build/envsetup.sh | sed -n "/^[ \t]*function /s/function \([a-z_]*\).*/\1/p" | sort | uniq`; do
       A="$A $i"
     done
     echo $A
@@ -137,9 +138,7 @@
         arm64) toolchaindir=aarch64/aarch64-linux-android-$targetgccversion/bin;
                toolchaindir2=arm/arm-linux-androideabi-$targetgccversion2/bin
             ;;
-        mips) toolchaindir=mips/mipsel-linux-android-$targetgccversion/bin
-            ;;
-        mips64) toolchaindir=mips/mips64el-linux-android-$targetgccversion/bin
+        mips|mips64) toolchaindir=mips/mips64el-linux-android-$targetgccversion/bin
             ;;
         *)
             echo "Can't find toolchain for unknown architecture: $ARCH"
@@ -459,7 +458,6 @@
 add_lunch_combo aosp_mips64-eng
 add_lunch_combo aosp_x86-eng
 add_lunch_combo aosp_x86_64-eng
-add_lunch_combo vbox_x86-eng
 
 function print_lunch_menu()
 {
@@ -567,12 +565,12 @@
 complete -F _lunch lunch
 
 # Configures the build to build unbundled apps.
-# Run tapas with one ore more app names (from LOCAL_PACKAGE_NAME)
+# Run tapas with one or more app names (from LOCAL_PACKAGE_NAME)
 function tapas()
 {
-    local arch=$(echo -n $(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|armv5)$'))
-    local variant=$(echo -n $(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$'))
-    local apps=$(echo -n $(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5)$'))
+    local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
+    local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
+    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
 
     if [ $(echo $arch | wc -w) -gt 1 ]; then
         echo "tapas: Error: Multiple build archs supplied: $arch"
@@ -585,9 +583,12 @@
 
     local product=full
     case $arch in
-      x86)   product=full_x86;;
-      mips)  product=full_mips;;
-      armv5) product=generic_armv5;;
+      x86)    product=full_x86;;
+      mips)   product=full_mips;;
+      armv5)  product=generic_armv5;;
+      arm64)  product=aosp_arm64;;
+      x86_64) product=aosp_x86_64;;
+      mips64) product=aosp_mips64;;
     esac
     if [ -z "$variant" ]; then
         variant=eng
@@ -609,7 +610,8 @@
 {
     local TOPFILE=build/core/envsetup.mk
     if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then
-        echo $TOP
+        # The following circumlocution ensures we remove symlinks from TOP.
+        (cd $TOP; PWD= /bin/pwd)
     else
         if [ -f $TOPFILE ] ; then
             # The following circumlocution (repeated below as well) ensures
@@ -981,8 +983,7 @@
    case "$ARCH" in
        arm) GDB=arm-linux-androideabi-gdb;;
        arm64) GDB=arm-linux-androideabi-gdb; GDB64=aarch64-linux-android-gdb;;
-       mips) GDB=mipsel-linux-android-gdb;;
-       mips64) GDB=mipsel-linux-android-gdb;;
+       mips|mips64) GDB=mips64el-linux-android-gdb;;
        x86) GDB=x86_64-linux-android-gdb;;
        x86_64) GDB=x86_64-linux-android-gdb;;
        *) echo "Unknown arch $ARCH"; return 1;;
@@ -1039,11 +1040,16 @@
        fi
 
        OUT_SO_SYMBOLS=$OUT_SO_SYMBOLS$USE64BIT
+       OUT_VENDOR_SO_SYMBOLS=$OUT_VENDOR_SO_SYMBOLS$USE64BIT
 
        echo >|"$OUT_ROOT/gdbclient.cmds" "set solib-absolute-prefix $OUT_SYMBOLS"
        echo >>"$OUT_ROOT/gdbclient.cmds" "set solib-search-path $OUT_SO_SYMBOLS:$OUT_SO_SYMBOLS/hw:$OUT_SO_SYMBOLS/ssl/engines:$OUT_SO_SYMBOLS/drm:$OUT_SO_SYMBOLS/egl:$OUT_SO_SYMBOLS/soundfx:$OUT_VENDOR_SO_SYMBOLS:$OUT_VENDOR_SO_SYMBOLS/hw:$OUT_VENDOR_SO_SYMBOLS/egl"
        echo >>"$OUT_ROOT/gdbclient.cmds" "source $ANDROID_BUILD_TOP/development/scripts/gdb/dalvik.gdb"
        echo >>"$OUT_ROOT/gdbclient.cmds" "target remote $PORT"
+       # Enable special debugging for ART processes.
+       if [[ $EXE =~ (^|/)(app_process|dalvikvm)(|32|64)$ ]]; then
+          echo >> "$OUT_ROOT/gdbclient.cmds" "art-on"
+       fi
        echo >>"$OUT_ROOT/gdbclient.cmds" ""
 
        local WHICH_GDB=
@@ -1069,14 +1075,14 @@
     Darwin)
         function sgrep()
         {
-            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cpp|S|java|xml|sh|mk)' -print0 | xargs -0 grep --color -n "$@"
+            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|S|java|xml|sh|mk|aidl)' -print0 | xargs -0 grep --color -n "$@"
         }
 
         ;;
     *)
         function sgrep()
         {
-            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cpp\|S\|java\|xml\|sh\|mk\)' -print0 | xargs -0 grep --color -n "$@"
+            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|S\|java\|xml\|sh\|mk\|aidl\)' -print0 | xargs -0 grep --color -n "$@"
         }
         ;;
 esac
@@ -1226,9 +1232,7 @@
     fi
 
     # issue "am" command to cause the hprof dump
-    local sdcard=$(adb ${adbOptions} shell echo -n '$EXTERNAL_STORAGE')
-    local devFile=$sdcard/hprof-$targetPid
-    #local devFile=/data/local/hprof-$targetPid
+    local devFile=/data/local/tmp/hprof-$targetPid
     echo "Poking $targetPid and waiting for data..."
     echo "Storing data at $devFile"
     adb ${adbOptions} shell am dumpheap $targetPid $devFile
@@ -1454,10 +1458,15 @@
     return $retval
 }
 
+function get_make_command()
+{
+  echo command make
+}
+
 function make()
 {
     local start_time=$(date +"%s")
-    command make "$@"
+    $(get_make_command) "$@"
     local ret=$?
     local end_time=$(date +"%s")
     local tdiff=$(($end_time-$start_time))
@@ -1482,6 +1491,8 @@
     return $ret
 }
 
+
+
 if [ "x$SHELL" != "x/bin/bash" ]; then
     case `ps -o command -p $$` in
         *bash*)
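Note: routing the build command through get_make_command lets a wrapper swap in its own driver without touching the timing logic in make(). A hypothetical override, sourced after envsetup.sh:

    function get_make_command()
    {
        echo command make -j16
    }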
diff --git a/target/board/generic/BoardConfig.mk b/target/board/generic/BoardConfig.mk
index 62303d6..3ae5860 100644
--- a/target/board/generic/BoardConfig.mk
+++ b/target/board/generic/BoardConfig.mk
@@ -87,3 +87,8 @@
         shell.te \
         surfaceflinger.te \
         system_server.te
+
+ifeq ($(TARGET_PRODUCT),sdk)
+  # include an expanded selection of fonts for the SDK.
+  EXTENDED_FONT_FOOTPRINT := true
+endif
diff --git a/target/board/generic/device.mk b/target/board/generic/device.mk
index fe64bcb..06a7d8a 100644
--- a/target/board/generic/device.mk
+++ b/target/board/generic/device.mk
@@ -25,6 +25,9 @@
 PRODUCT_COPY_FILES := \
     device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
     device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
     device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
     hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
 
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 1493bd9..818f857 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -22,11 +22,39 @@
 TARGET_CPU_ABI := arm64-v8a
 
 TARGET_2ND_ARCH := arm
-TARGET_2ND_ARCH_VARIANT := armv7-a-neon
-TARGET_2ND_CPU_VARIANT := cortex-a15
 TARGET_2ND_CPU_ABI := armeabi-v7a
 TARGET_2ND_CPU_ABI2 := armeabi
 
+ifneq ($(TARGET_BUILD_APPS)$(filter cts,$(MAKECMDGOALS)),)
+# DO NOT USE
+# DO NOT USE
+#
+# This architecture / CPU variant must NOT be used for any 64 bit
+# platform builds. It is the lowest common denominator required
+# to build an unbundled application or cts for all supported 32 and 64 bit
+# platforms.
+#
+# If you're building a 64 bit platform (and not an application) the
+# ARM-v8 specification allows you to assume NEON and all the features
+# available in a cortex-A15 CPU. You should be able to set :
+#
+# TARGET_2ND_ARCH_VARIANT := armv7-a-neon
+# TARGET_2ND_CPU_VARIANT := cortex-a15
+#
+# DO NOT USE
+# DO NOT USE
+TARGET_2ND_ARCH_VARIANT := armv7-a
+# DO NOT USE
+# DO NOT USE
+TARGET_2ND_CPU_VARIANT := generic
+# DO NOT USE
+# DO NOT USE
+else
+TARGET_2ND_ARCH_VARIANT := armv7-a-neon
+TARGET_2ND_CPU_VARIANT := cortex-a15
+endif
+
+
 TARGET_USES_64_BIT_BINDER := true
 
 # no hardware camera
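
For a quick check of which 32-bit variant a given build actually ends up with, the get_build_var helper from envsetup.sh can dump the two variables; the lunch combo below is only an example:

    source build/envsetup.sh
    lunch aosp_arm64-eng                     # a platform (non-unbundled) build
    get_build_var TARGET_2ND_ARCH_VARIANT    # expected: armv7-a-neon
    get_build_var TARGET_2ND_CPU_VARIANT     # expected: cortex-a15
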
@@ -48,10 +76,9 @@
 USE_OPENGL_RENDERER := true
 
 TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 786432000
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 845427200
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
 BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
 BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
 BOARD_FLASH_BLOCK_SIZE := 512
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-
diff --git a/target/board/generic_arm64/device.mk b/target/board/generic_arm64/device.mk
index 354fb2a..11a6a31 100644
--- a/target/board/generic_arm64/device.mk
+++ b/target/board/generic_arm64/device.mk
@@ -25,6 +25,9 @@
 PRODUCT_COPY_FILES := \
     device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
     device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
     device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml
 
 PRODUCT_PACKAGES := \
diff --git a/target/board/generic_mips/device.mk b/target/board/generic_mips/device.mk
index b182c4c..590f422 100644
--- a/target/board/generic_mips/device.mk
+++ b/target/board/generic_mips/device.mk
@@ -25,6 +25,9 @@
 PRODUCT_COPY_FILES := \
     device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
     device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
     device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
     hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
 
diff --git a/target/board/generic_mips64/BoardConfig.mk b/target/board/generic_mips64/BoardConfig.mk
index 7452978..f8cf4f0 100644
--- a/target/board/generic_mips64/BoardConfig.mk
+++ b/target/board/generic_mips64/BoardConfig.mk
@@ -24,15 +24,19 @@
 
 TARGET_ARCH := mips64
 ifeq (,$(TARGET_ARCH_VARIANT))
-TARGET_ARCH_VARIANT := mips64r2
+TARGET_ARCH_VARIANT := mips64r6
 endif
 TARGET_CPU_ABI  := mips64
 
 TARGET_2ND_ARCH := mips
 ifeq (,$(TARGET_2ND_ARCH_VARIANT))
+ifeq ($(TARGET_ARCH_VARIANT),mips64r6)
+TARGET_2ND_ARCH_VARIANT :=  mips32r6
+else
 TARGET_2ND_ARCH_VARIANT :=  mips32r2-fp
 endif
-TARGET_CPU_ABI  := mips
+endif
+TARGET_2ND_CPU_ABI  := mips
 
 # The emulator (qemu) uses the Goldfish devices
 HAVE_HTC_AUDIO_DRIVER := true
diff --git a/target/board/generic_mips64/device.mk b/target/board/generic_mips64/device.mk
index 58fe976..015686e 100644
--- a/target/board/generic_mips64/device.mk
+++ b/target/board/generic_mips64/device.mk
@@ -25,6 +25,9 @@
 PRODUCT_COPY_FILES := \
     device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
     device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
     device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
     hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
 
diff --git a/target/board/generic_x86/device.mk b/target/board/generic_x86/device.mk
index 089f584..b5b0faf 100644
--- a/target/board/generic_x86/device.mk
+++ b/target/board/generic_x86/device.mk
@@ -25,6 +25,9 @@
 PRODUCT_COPY_FILES := \
     device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
     device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
     device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml
 
 PRODUCT_PACKAGES := \
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index c597f7d..c4fd958 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -41,7 +41,7 @@
 USE_OPENGL_RENDERER := true
 
 TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 880803840 # 840M
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 943718400 # 900MB
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
 BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
 BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
diff --git a/target/board/generic_x86_64/device.mk b/target/board/generic_x86_64/device.mk
index 089f584..b5b0faf 100755
--- a/target/board/generic_x86_64/device.mk
+++ b/target/board/generic_x86_64/device.mk
@@ -25,6 +25,9 @@
 PRODUCT_COPY_FILES := \
     device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
     device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+    frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
     device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml
 
 PRODUCT_PACKAGES := \
diff --git a/target/board/vbox_x86/AndroidBoard.mk b/target/board/vbox_x86/AndroidBoard.mk
deleted file mode 100644
index 8b13789..0000000
--- a/target/board/vbox_x86/AndroidBoard.mk
+++ /dev/null
@@ -1 +0,0 @@
-
diff --git a/target/board/vbox_x86/BoardConfig.mk b/target/board/vbox_x86/BoardConfig.mk
deleted file mode 100644
index e5a1d3e..0000000
--- a/target/board/vbox_x86/BoardConfig.mk
+++ /dev/null
@@ -1,40 +0,0 @@
-#
-# IA target for VirtualBox
-#
-
-TARGET_ARCH=x86
-TARGET_COMPRESS_MODULE_SYMBOLS := false
-TARGET_NO_RECOVERY := true
-TARGET_HARDWARE_3D := false
-BOARD_USES_GENERIC_AUDIO := true
-USE_CAMERA_STUB := true
-TARGET_CPU_ABI := x86
-TARGET_USERIMAGES_USE_EXT4 := true
-TARGET_BOOTIMAGE_USE_EXT2 := true
-BOARD_CACHEIMAGE_PARTITION_SIZE := 268435456
-BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
-
-# For VirtualBox and likely other emulators
-BOARD_INSTALLER_CMDLINE := init=/init console=ttyS0 console=tty0 vga=788 verbose
-BOARD_KERNEL_CMDLINE := init=/init qemu=1 console=tty0 vga=788 verbose androidboot.hardware=vbox_x86 androidboot.console=tty0 android.qemud=tty0
-TARGET_USE_DISKINSTALLER := true
-
-TARGET_DISK_LAYOUT_CONFIG := build/target/board/vbox_x86/disk_layout.conf
-BOARD_BOOTIMAGE_MAX_SIZE := 8388608
-BOARD_SYSLOADER_MAX_SIZE := 7340032
-BOARD_FLASH_BLOCK_SIZE := 512
-# 50M
-BOARD_USERDATAIMAGE_PARTITION_SIZE := 52428800
-# 500M
-BOARD_INSTALLERIMAGE_PARTITION_SIZE := 524288000
-TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
-# Reserve 256M for the system partition
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 268435456
-
-WITH_DEXPREOPT := false
-
-# The eth0 device should be started with dhcp on boot.
-# Useful for emulators that don't provide a wifi connection.
-NET_ETH0_STARTONBOOT := true
-
-ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.heapsize=32m
diff --git a/target/board/vbox_x86/README.txt b/target/board/vbox_x86/README.txt
deleted file mode 100644
index 568dc5f..0000000
--- a/target/board/vbox_x86/README.txt
+++ /dev/null
@@ -1,9 +0,0 @@
-The "vbox_x86" product defines a non-hardware-specific target intended
-to run on the VirtualBox emulator.
-
-Most of the Android devices (networking, phones, sound, etc) do not work.
-
-ADB via ethernet works with this target. You can use 'adb install' to
-test applications that do not require network, phone or sound support.
-This emulation is useful because VirtualBox runs much faster than the
-QEMU emulators (at least until a KVM enabled QEMU emulator is available).
diff --git a/target/board/vbox_x86/device.mk b/target/board/vbox_x86/device.mk
deleted file mode 100644
index a44a87f..0000000
--- a/target/board/vbox_x86/device.mk
+++ /dev/null
@@ -1,36 +0,0 @@
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a build configuration for the product aspects that
-# are specific to the emulator.
-
-LOCAL_PATH := $(call my-dir)
-
-PRODUCT_PROPERTY_OVERRIDES := \
-    ro.ril.hsxpa=1 \
-    ro.ril.gprsclass=10 \
-    ro.adb.qemud=1
-
-LOCAL_KERNEL := prebuilts/qemu-kernel/x86/kernel-vbox
-
-PRODUCT_COPY_FILES := \
-    device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
-    device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
-    device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
-    build/target/board/vbox_x86/init.vbox_x86.rc:root/init.vbox_x86.rc \
-    $(LOCAL_KERNEL):kernel
-
-$(call inherit-product, frameworks/native/build/phone-xhdpi-1024-dalvik-heap.mk)
diff --git a/target/board/vbox_x86/disk_layout.conf b/target/board/vbox_x86/disk_layout.conf
deleted file mode 100644
index 12241ab..0000000
--- a/target/board/vbox_x86/disk_layout.conf
+++ /dev/null
@@ -1,76 +0,0 @@
-# Best to align all partition starts/ends on a cylinder boundary (fdisk prefers it)
-# 512 bytes/sector
-# 63 sectors/track
-# 32 tracks/cylinder
-
-# LBAs are in 'k', so...
-# 1008 blocks (1k each) (1032192 bytes) / cylinder
-
-device {
-
-    path /dev/block/sda
-
-    scheme mbr
-
-    # bytes in a disk sector (== 1 LBA), must be a power of 2!
-    sector_size 512
-
-    # Start_lba should be on a cylinder boundary.
-    start_lba 63
-
-    # Autodetect disk size if == 0
-    num_lba 0
-
-    partitions {
-        # /dev/sdX1
-        sysloader {
-            active y
-            type linux
-            # 8 cyls in length... about 8M
-            len 8064
-        }
-
-        # /dev/sdX2
-        recovery {
-            active y
-            type linux
-            # 8 cyls in length... about 8M
-            len 8064
-        }
-
-        # /dev/sdX3
-        boot {
-            active y
-            type linux
-            # 8 cyls in length... about 8M
-            len 8064
-        }
-
-        # /dev/sdX4
-        # (extended partition begins)
-
-        # /dev/sdX5
-        cache {
-            type linux
-            len 512M
-        }
-
-        # /dev/sdX6
-        system {
-            type linux
-            len 512M
-        }
-
-        # /dev/sdX7
-        third_party {
-            type linux
-            len 512M
-        }
-
-        # /dev/sdX8
-        data {
-            type linux
-            len -1
-        }
-    }
-}
diff --git a/target/board/vbox_x86/init.vbox_x86.rc b/target/board/vbox_x86/init.vbox_x86.rc
deleted file mode 100644
index 15ca572..0000000
--- a/target/board/vbox_x86/init.vbox_x86.rc
+++ /dev/null
@@ -1,90 +0,0 @@
-on early-init
-    export EXTERNAL_STORAGE /mnt/sdcard
-    mkdir /mnt/sdcard 0000 system system
-    # for backwards compatibility
-    symlink /mnt/sdcard /sdcard
-
-on boot
-    setprop ARGH ARGH
-    setprop net.eth0.gw 10.0.2.2
-    setprop net.eth0.dns1 10.0.2.3
-    setprop net.gprs.local-ip 10.0.2.15
-    setprop ro.radio.use-ppp no
-    setprop ro.build.product generic
-    setprop ro.product.device generic
-
-# fake some battery state
-    setprop status.battery.state Slow
-    setprop status.battery.level 5
-    setprop status.battery.level_raw  50
-    setprop status.battery.level_scale 9
-
-# disable some daemons the emulator doesn't want
-    stop dund
-    stop akmd
-
-# start essential services
-    start qemud
-    start goldfish-logcat
-#   start goldfish-setup
-    start netcfg
-
-    setprop ro.setupwizard.mode EMULATOR
-
-on fs
-# mount sda (system) and sdb (data) partitions
-    mount ext4 /dev/block/sda6 /system
-    mount ext4 /dev/block/sda6 /system ro remount
-    mount ext4 /dev/block/sdb6 /data nosuid nodev
-    mount ext4 /dev/block/sdb7 /cache nosuid nodev
-
-# enable Google-specific location features,
-# like NetworkLocationProvider and LocationCollector
-    setprop ro.com.google.locationfeatures 1
-
-# For the emulator, which bypasses Setup Wizard, you can specify
-# account info for the device via these two properties.  Google
-# Login Service will insert these accounts into the database when
-# it is created (ie, after a data wipe).
-#
-#   setprop ro.config.hosted_account username@hosteddomain.org:password
-#   setprop ro.config.google_account username@gmail.com:password
-#
-# You MUST have a Google account on the device, and you MAY
-# additionally have a hosted account.  No other configuration is
-# supported, and arbitrary breakage may result if you specify
-# something else.
-
-service goldfish-setup /system/etc/init.goldfish.sh
-    user root
-    group root
-    oneshot
-
-# The qemu-props program is used to set various system
-# properties on boot. It must be run early during the boot
-# process to avoid race conditions with other daemons that
-# might read them (e.g. surface flinger), so define it in
-# class 'core'
-#
-service qemu-props /system/bin/qemu-props
-    class core
-    user root
-    group root
-    oneshot
-
-service qemud /system/bin/qemud
-    socket qemud    stream 666
-    oneshot
-
-# -Q is a special logcat option that forces the
-# program to check whether it runs on the emulator
-# if it does, it redirects its output to the device
-# named by the androidboot.console kernel option
-# if not, it simply exits immediately
-
-service goldfish-logcat /system/bin/logcat -Q
-    oneshot
-
-# Enable networking so that adb can connect
-service netcfg /system/bin/netcfg eth0 dhcp
-    oneshot
diff --git a/target/board/vbox_x86/system.prop b/target/board/vbox_x86/system.prop
deleted file mode 100644
index 137a0f9..0000000
--- a/target/board/vbox_x86/system.prop
+++ /dev/null
@@ -1,6 +0,0 @@
-#
-# system.prop for generic sdk
-#
-
-rild.libpath=/system/lib/libreference-ril.so
-rild.libargs=-d /dev/ttyS0
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index fc407d3..ac5902c 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -42,7 +42,8 @@
     $(LOCAL_DIR)/aosp_mips.mk \
     $(LOCAL_DIR)/full_mips.mk \
     $(LOCAL_DIR)/aosp_arm64.mk \
-    $(LOCAL_DIR)/aosp_mips64.mk
+    $(LOCAL_DIR)/aosp_mips64.mk \
+    $(LOCAL_DIR)/aosp_x86_64.mk
 else
 PRODUCT_MAKEFILES := \
     $(LOCAL_DIR)/core.mk \
@@ -59,9 +60,15 @@
     $(LOCAL_DIR)/aosp_mips64.mk \
     $(LOCAL_DIR)/aosp_x86_64.mk \
     $(LOCAL_DIR)/full_x86_64.mk \
-    $(LOCAL_DIR)/vbox_x86.mk \
+    $(LOCAL_DIR)/sdk_phone_armv7.mk \
+    $(LOCAL_DIR)/sdk_phone_x86.mk \
+    $(LOCAL_DIR)/sdk_phone_mips.mk \
+    $(LOCAL_DIR)/sdk_phone_arm64.mk \
+    $(LOCAL_DIR)/sdk_phone_x86_64.mk \
+    $(LOCAL_DIR)/sdk_phone_mips64.mk \
     $(LOCAL_DIR)/sdk.mk \
     $(LOCAL_DIR)/sdk_x86.mk \
     $(LOCAL_DIR)/sdk_mips.mk \
-    $(LOCAL_DIR)/large_emu_hw.mk
+    $(LOCAL_DIR)/sdk_arm64.mk \
+    $(LOCAL_DIR)/sdk_x86_64.mk
 endif
diff --git a/target/product/base.mk b/target/product/base.mk
index 4b8326d..9713330 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -18,6 +18,8 @@
 PRODUCT_PACKAGES += \
     20-dns.conf \
     95-configured \
+    appwidget \
+    appops \
     am \
     android.policy \
     android.test.runner \
@@ -30,8 +32,8 @@
     dhcpcd \
     dhcpcd-run-hooks \
     dnsmasq \
+    dpm \
     framework \
-    framework2 \
     fsck_msdos \
     ime \
     input \
@@ -41,13 +43,17 @@
     libandroid_servers \
     libaudioeffect_jni \
     libaudioflinger \
+    libaudiopolicyservice \
+    libaudiopolicymanager \
     libbundlewrapper \
     libcamera_client \
     libcameraservice \
     libdl \
+    libdrmclearkeyplugin \
     libeffectproxy \
     libeffects \
     libinput \
+    libinputflinger \
     libiprouteutil \
     libjnigraphics \
     libldnhncr \
@@ -55,8 +61,10 @@
     libmedia_jni \
     libmediaplayerservice \
     libmtp \
+    libnetd_client \
     libnetlink \
     libnetutils \
+    libpdfium \
     libreference-ril \
     libreverbwrapper \
     libril \
@@ -65,6 +73,8 @@
     libskia \
     libsonivox \
     libsoundpool \
+    libsoundtrigger \
+    libsoundtriggerservice \
     libsqlite \
     libstagefright \
     libstagefright_amrnb_common \
@@ -77,6 +87,8 @@
     libutils \
     libvisualizer \
     libvorbisidec \
+    libmediandk \
+    libwifi-service \
     media \
     media_cmd \
     mediaserver \
@@ -93,7 +105,6 @@
     racoon \
     run-as \
     schedtest \
-    screenshot \
     sdcard \
     services \
     settings \
diff --git a/target/product/core.mk b/target/product/core.mk
index 8c88b94..876a536 100644
--- a/target/product/core.mk
+++ b/target/product/core.mk
@@ -22,16 +22,36 @@
 PRODUCT_PACKAGES += \
     BasicDreams \
     Browser \
+    Calculator \
+    Calendar \
+    CalendarProvider \
+    CaptivePortalLogin \
+    CertInstaller \
     Contacts \
+    DeskClock \
     DocumentsUI \
     DownloadProviderUi \
+    Email \
+    Exchange2 \
     ExternalStorageProvider \
+    FusedLocation \
+    InputDevices \
     KeyChain \
+    Keyguard \
+    LatinIME \
+    Launcher2 \
+    ManagedProvisioning \
     PicoTts \
     PacProcessor \
     libpac \
+    PrintSpooler \
     ProxyHandler \
+    QuickSearchBox \
+    Settings \
     SharedStorageBackup \
-    VpnDialogs
+    Telecom \
+    TeleService \
+    VpnDialogs \
+    MmsService
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_base.mk)
diff --git a/target/product/core_base.mk b/target/product/core_base.mk
index d7d74bc..6c29482 100644
--- a/target/product/core_base.mk
+++ b/target/product/core_base.mk
@@ -30,14 +30,12 @@
     libandroidfw \
     libaudiopreprocessing \
     libaudioutils \
-    libbcc \
     libfilterpack_imageproc \
     libgabi++ \
     libmdnssd \
     libnfc_ndef \
     libpowermanager \
     libspeexresampler \
-    libstagefright_chromium_http \
     libstagefright_soft_aacdec \
     libstagefright_soft_aacenc \
     libstagefright_soft_amrdec \
@@ -48,9 +46,11 @@
     libstagefright_soft_gsmdec \
     libstagefright_soft_h264dec \
     libstagefright_soft_h264enc \
+    libstagefright_soft_hevcdec \
     libstagefright_soft_mp3dec \
     libstagefright_soft_mpeg4dec \
     libstagefright_soft_mpeg4enc \
+    libstagefright_soft_opusdec \
     libstagefright_soft_rawdec \
     libstagefright_soft_vorbisdec \
     libstagefright_soft_vpxdec \
@@ -58,26 +58,7 @@
     libvariablespeed \
     libwebrtc_audio_preprocessing \
     mdnsd \
-    mms-common \
     requestsync \
-    telephony-common \
-    voip-common
+    wifi-service
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_minimal.mk)
-# Override the PRODUCT_BOOT_JARS set in core_minimal.mk. The order matters.
-PRODUCT_BOOT_JARS := \
-    core-libart \
-    conscrypt \
-    okhttp \
-    core-junit \
-    bouncycastle \
-    ext \
-    framework \
-    framework2 \
-    telephony-common \
-    voip-common \
-    mms-common \
-    android.policy \
-    services \
-    apache-xml \
-    webviewchromium
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index c371368..4c08cb0 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -30,21 +30,28 @@
     PackageInstaller \
     SettingsProvider \
     Shell \
+    bcc \
     bu \
+    com.android.future.usb.accessory \
     com.android.location.provider \
     com.android.location.provider.xml \
     com.android.media.remotedisplay \
     com.android.media.remotedisplay.xml \
+    com.android.mediadrm.signer \
+    com.android.mediadrm.signer.xml \
     drmserver \
+    ethernet-service \
     framework-res \
     idmap \
     installd \
+    ims-common \
     ip \
     ip-up-vpn \
     ip6tables \
     iptables \
     keystore \
     keystore.default \
+    libbcc \
     libOpenMAXAL \
     libOpenSLES \
     libdownmix \
@@ -56,10 +63,17 @@
     libwilhelm \
     logd \
     make_ext4fs \
+    e2fsck \
+    resize2fs \
+    mms-common \
     screencap \
     sensorservice \
+    telephony-common \
     uiautomator \
-    webview
+    uncrypt \
+    voip-common \
+    webview \
+    wifi-service
 
 PRODUCT_COPY_FILES += \
     frameworks/native/data/etc/android.software.webview.xml:system/etc/permissions/android.software.webview.xml
@@ -73,11 +87,18 @@
     bouncycastle \
     ext \
     framework \
-    framework2 \
+    telephony-common \
+    voip-common \
+    ims-common \
+    mms-common \
     android.policy \
-    services \
     apache-xml \
-    webviewchromium
+
+# The order of PRODUCT_SYSTEM_SERVER_JARS matters.
+PRODUCT_SYSTEM_SERVER_JARS := \
+    services \
+    ethernet-service \
+    wifi-service
 
 PRODUCT_RUNTIMES := runtime_libart_default
 
diff --git a/target/product/core_tiny.mk b/target/product/core_tiny.mk
index 52244a3..d6dbe98 100644
--- a/target/product/core_tiny.mk
+++ b/target/product/core_tiny.mk
@@ -22,21 +22,12 @@
     ContactsProvider \
     CertInstaller \
     FusedLocation \
-    InputDevices \
-    bluetooth-health \
-    hostapd \
-    wpa_supplicant.conf
+    InputDevices
 
 PRODUCT_PACKAGES += \
-    audio \
     clatd \
     clatd.conf \
-    dhcpcd.conf \
-    network \
-    pand \
-    pppd \
-    sdptool \
-    wpa_supplicant
+    pppd
 
 PRODUCT_PACKAGES += \
     audio.primary.default \
@@ -57,6 +48,7 @@
     com.android.location.provider.xml \
     framework-res \
     installd \
+    ims-common \
     ip \
     ip-up-vpn \
     ip6tables \
@@ -73,42 +65,55 @@
     libdrmframework_jni \
     libdrmframework \
     make_ext4fs \
+    e2fsck \
+    resize2fs \
     nullwebview \
     screencap \
     sensorservice \
     uiautomator \
+    uncrypt \
     telephony-common \
     voip-common \
-    mms-common
+    logd \
+    mms-common \
+    wifi-service
 
 # The order matters
 PRODUCT_BOOT_JARS := \
-    core \
+    core-libart \
     conscrypt \
     okhttp \
     core-junit \
     bouncycastle \
     ext \
     framework \
-    framework2 \
     telephony-common \
     voip-common \
+    ims-common \
     mms-common \
     android.policy \
-    services \
     apache-xml \
-    nullwebview
+    nullwebview \
 
-PRODUCT_RUNTIMES := runtime_libdvm_default
+# The order of PRODUCT_SYSTEM_SERVER_JARS matters.
+PRODUCT_SYSTEM_SERVER_JARS := \
+    services \
+    wifi-service
+
+PRODUCT_RUNTIMES := runtime_libart_default
+
+PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+    ro.zygote=zygote32
+PRODUCT_COPY_FILES += \
+    system/core/rootdir/init.zygote32.rc:root/init.zygote32.rc
 
 PRODUCT_PROPERTY_OVERRIDES += \
     ro.carrier=unknown
 
+$(call inherit-product, $(SRC_TARGET_DIR)/product/base.mk)
 $(call inherit-product-if-exists, frameworks/base/data/fonts/fonts.mk)
 
 # Overrides
 PRODUCT_BRAND := tiny
 PRODUCT_DEVICE := tiny
 PRODUCT_NAME := core_tiny
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/base.mk)
diff --git a/target/product/full_base.mk b/target/product/full_base.mk
index 059697e..9b1a826 100644
--- a/target/product/full_base.mk
+++ b/target/product/full_base.mk
@@ -22,11 +22,9 @@
 PRODUCT_PACKAGES := \
     libfwdlockengine \
     OpenWnn \
-    PinyinIME \
     libWnnEngDic \
     libWnnJpnDic \
     libwnndict \
-    VideoEditor \
     WAPPushManager
 
 PRODUCT_PACKAGES += \
diff --git a/target/product/full_base_telephony.mk b/target/product/full_base_telephony.mk
index f98e9a2..2fd2ce8 100644
--- a/target/product/full_base_telephony.mk
+++ b/target/product/full_base_telephony.mk
@@ -27,7 +27,8 @@
     ro.com.android.dataroaming=true
 
 PRODUCT_COPY_FILES := \
-    device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
+    device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
+    frameworks/native/data/etc/handheld_core_hardware.xml:system/etc/permissions/handheld_core_hardware.xml
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/telephony.mk)
diff --git a/target/product/generic_no_telephony.mk b/target/product/generic_no_telephony.mk
index 12797f4..0713db1 100644
--- a/target/product/generic_no_telephony.mk
+++ b/target/product/generic_no_telephony.mk
@@ -17,37 +17,18 @@
 # This is a generic phone product that isn't specialized for a specific device.
 # It includes the base Android platform.
 
-PRODUCT_POLICY := android.policy_phone
-
 PRODUCT_PACKAGES := \
-    DeskClock \
     Bluetooth \
-    Calculator \
-    Calendar \
     Camera2 \
-    CertInstaller \
-    Email \
-    Exchange2 \
-    FusedLocation \
     Gallery2 \
-    InputDevices \
-    Keyguard \
-    LatinIME \
-    Launcher2 \
     Music \
     MusicFX \
     OneTimeInitializer \
-    PrintSpooler \
     Provision \
-    QuickSearchBox \
-    Settings \
     SystemUI \
-    TeleService \
-    CalendarProvider \
     WallpaperCropper
 
 PRODUCT_PACKAGES += \
-    bcc \
     clatd \
     clatd.conf \
     pppd \
@@ -81,9 +62,9 @@
 $(call inherit-product-if-exists, external/google-fonts/dancing-script/fonts.mk)
 $(call inherit-product-if-exists, external/google-fonts/carrois-gothic-sc/fonts.mk)
 $(call inherit-product-if-exists, external/google-fonts/coming-soon/fonts.mk)
+$(call inherit-product-if-exists, external/google-fonts/cutive-mono/fonts.mk)
 $(call inherit-product-if-exists, external/noto-fonts/fonts.mk)
 $(call inherit-product-if-exists, external/naver-fonts/fonts.mk)
-$(call inherit-product-if-exists, external/sil-fonts/fonts.mk)
 $(call inherit-product-if-exists, frameworks/base/data/keyboards/keyboards.mk)
 $(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core.mk)
diff --git a/target/product/languages_full.mk b/target/product/languages_full.mk
index 4cddc06..030777e 100644
--- a/target/product/languages_full.mk
+++ b/target/product/languages_full.mk
@@ -21,4 +21,4 @@
 
 # These are all the locales that have translations and are displayable
 # by TextView in this branch.
-PRODUCT_LOCALES := en_US en_IN fr_FR it_IT es_ES et_EE de_DE nl_NL cs_CZ pl_PL ja_JP zh_TW zh_CN zh_HK ru_RU ko_KR nb_NO es_US da_DK el_GR tr_TR pt_PT pt_BR rm_CH sv_SE bg_BG ca_ES en_GB fi_FI hi_IN hr_HR hu_HU in_ID iw_IL lt_LT lv_LV ro_RO sk_SK sl_SI sr_RS uk_UA vi_VN tl_PH ar_EG fa_IR th_TH sw_TZ ms_MY af_ZA zu_ZA am_ET hi_IN en_XA ar_XB fr_CA km_KH lo_LA ne_NP si_LK mn_MN hy_AM az_AZ ka_GE
+PRODUCT_LOCALES := en_AU en_US en_IN fr_FR it_IT es_ES et_EE de_DE nl_NL cs_CZ pl_PL ja_JP zh_TW zh_CN zh_HK ru_RU ko_KR nb_NO es_US da_DK el_GR tr_TR pt_PT pt_BR rm_CH sv_SE bg_BG ca_ES en_GB fi_FI hi_IN hr_HR hu_HU in_ID iw_IL lt_LT lv_LV ro_RO sk_SK sl_SI sr_RS uk_UA vi_VN tl_PH ar_EG fa_IR th_TH sw_TZ ms_MY af_ZA zu_ZA am_ET hi_IN en_XA ar_XB fr_CA km_KH lo_LA ne_NP si_LK mn_MN hy_AM az_AZ ka_GE my_MM mr_IN ml_IN is_IS mk_MK ky_KG eu_ES gl_ES bn_BD ta_IN kn_IN te_IN uz_UZ ur_PK kk_KZ
diff --git a/target/product/large_emu_hw.mk b/target/product/large_emu_hw.mk
deleted file mode 100644
index a918c1d..0000000
--- a/target/product/large_emu_hw.mk
+++ /dev/null
@@ -1,52 +0,0 @@
-#
-# Copyright (C) 2007 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a generic product for devices with large display but not specialized
-# for a specific device. It includes the base Android platform.
-
-PRODUCT_POLICY := android.policy_mid
-
-PRODUCT_PACKAGES := \
-    CarHome \
-    DeskClock \
-    Bluetooth \
-    Calculator \
-    Calendar \
-    CertInstaller \
-    Email \
-    Exchange2 \
-    Gallery2 \
-    LatinIME \
-    Launcher2 \
-    Music \
-    Provision \
-    QuickSearchBox \
-    Settings \
-    Sync \
-    Updater \
-    CalendarProvider \
-    SyncProvider \
-    bluetooth-health \
-    hostapd \
-    wpa_supplicant.conf
-
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core.mk)
-
-# Overrides
-PRODUCT_BRAND := generic
-PRODUCT_DEVICE := generic
-PRODUCT_NAME := large_emu_hw
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index 6cb8818..e4200b3 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -20,12 +20,14 @@
         core-libart \
         libart \
         dex2oat \
-        oatdump
+        oatdump \
+        patchoat
 
 PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
     dalvik.vm.image-dex2oat-Xms=64m \
     dalvik.vm.image-dex2oat-Xmx=64m \
     dalvik.vm.dex2oat-Xms=64m \
     dalvik.vm.dex2oat-Xmx=512m \
+    ro.dalvik.vm.native.bridge=0 \
 
 include $(SRC_TARGET_DIR)/product/runtime_common.mk
diff --git a/target/product/sdk.mk b/target/product/sdk.mk
index 5926f19..96d8cc9 100644
--- a/target/product/sdk.mk
+++ b/target/product/sdk.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2007 The Android Open Source Project
+# Copyright (C) 2014 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,170 +14,8 @@
 # limitations under the License.
 #
 
-PRODUCT_POLICY := android.policy_phone
-PRODUCT_PROPERTY_OVERRIDES :=
+# Don't modify this file - It's just an alias!
 
-PRODUCT_PACKAGES := \
-	Calculator \
-	DeskClock \
-	Email \
-	Exchange2 \
-	FusedLocation \
-	Gallery \
-	Keyguard \
-	Music \
-	Mms \
-	OpenWnn \
-	PrintSpooler \
-	libWnnEngDic \
-	libWnnJpnDic \
-	libwnndict \
-	TeleService \
-	PinyinIME \
-	Protips \
-	SoftKeyboard \
-	SystemUI \
-	Launcher2 \
-	Development \
-	DevelopmentSettings \
-	Fallback \
-	Settings \
-	SdkSetup \
-	CustomLocale \
-	sqlite3 \
-	InputDevices \
-	LatinIME \
-	CertInstaller \
-	LiveWallpapersPicker \
-	ApiDemos \
-	GestureBuilder \
-	CubeLiveWallpapers \
-	QuickSearchBox \
-	WidgetPreview \
-	librs_jni \
-	ConnectivityTest \
-	GpsLocationTest \
-	CalendarProvider \
-	Calendar \
-	SmokeTest \
-	SmokeTestApp \
-	rild \
-	LegacyCamera \
-	Dialer
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_armv7.mk)
 
-# Define the host tools and libs that are parts of the SDK.
--include sdk/build/product_sdk.mk
--include development/build/product_sdk.mk
-
-# audio libraries.
-PRODUCT_PACKAGES += \
-	audio.primary.goldfish \
-	audio_policy.default \
-	local_time.default
-
-PRODUCT_PACKAGE_OVERLAYS := development/sdk_overlay
-
-PRODUCT_COPY_FILES := \
-	device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
-	frameworks/base/data/sounds/effects/camera_click.ogg:system/media/audio/ui/camera_click.ogg \
-	frameworks/base/data/sounds/effects/VideoRecord.ogg:system/media/audio/ui/VideoRecord.ogg \
-	frameworks/native/data/etc/handheld_core_hardware.xml:system/etc/permissions/handheld_core_hardware.xml \
-	device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
-	device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
-	frameworks/native/data/etc/android.hardware.touchscreen.multitouch.jazzhand.xml:system/etc/permissions/android.hardware.touchscreen.multitouch.jazzhand.xml \
-	frameworks/native/data/etc/android.hardware.camera.autofocus.xml:system/etc/permissions/android.hardware.camera.autofocus.xml \
-	frameworks/av/media/libeffects/data/audio_effects.conf:system/etc/audio_effects.conf \
-	hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
-
-include $(SRC_TARGET_DIR)/product/emulator.mk
-
-$(call inherit-product-if-exists, frameworks/base/data/sounds/AllAudio.mk)
-$(call inherit-product-if-exists, frameworks/base/data/fonts/fonts.mk)
-$(call inherit-product-if-exists, external/google-fonts/dancing-script/fonts.mk)
-$(call inherit-product-if-exists, external/google-fonts/carrois-gothic-sc/fonts.mk)
-$(call inherit-product-if-exists, external/google-fonts/coming-soon/fonts.mk)
-$(call inherit-product-if-exists, external/noto-fonts/fonts.mk)
-$(call inherit-product-if-exists, external/naver-fonts/fonts.mk)
-$(call inherit-product-if-exists, external/sil-fonts/fonts.mk)
-$(call inherit-product-if-exists, frameworks/base/data/keyboards/keyboards.mk)
-$(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core.mk)
-
-# Overrides
-PRODUCT_BRAND := generic
 PRODUCT_NAME := sdk
-PRODUCT_DEVICE := generic
-
-# locale + densities. en_US is both first and in alphabetical order to
-# ensure this is the default locale.
-PRODUCT_LOCALES = \
-	en_US \
-	ldpi \
-	hdpi \
-	mdpi \
-	xhdpi \
-	ar_EG \
-	ar_IL \
-	bg_BG \
-	ca_ES \
-	cs_CZ \
-	da_DK \
-	de_AT \
-	de_CH \
-	de_DE \
-	de_LI \
-	el_GR \
-	en_AU \
-	en_CA \
-	en_GB \
-	en_IE \
-	en_IN \
-	en_NZ \
-	en_SG \
-	en_US \
-	en_ZA \
-	es_ES \
-	es_US \
-	fi_FI \
-	fr_BE \
-	fr_CA \
-	fr_CH \
-	fr_FR \
-	he_IL \
-	hi_IN \
-	hr_HR \
-	hu_HU \
-	id_ID \
-	it_CH \
-	it_IT \
-	ja_JP \
-	ko_KR \
-	lt_LT \
-	lv_LV \
-	nb_NO \
-	nl_BE \
-	nl_NL \
-	pl_PL \
-	pt_BR \
-	pt_PT \
-	ro_RO \
-	ru_RU \
-	sk_SK \
-	sl_SI \
-	sr_RS \
-	sv_SE \
-	th_TH \
-	tl_PH \
-	tr_TR \
-	uk_UA \
-	vi_VN \
-	zh_CN \
-	zh_TW
-
-# include available languages for TTS in the system image
--include external/svox/pico/lang/PicoLangDeDeInSystem.mk
--include external/svox/pico/lang/PicoLangEnGBInSystem.mk
--include external/svox/pico/lang/PicoLangEnUsInSystem.mk
--include external/svox/pico/lang/PicoLangEsEsInSystem.mk
--include external/svox/pico/lang/PicoLangFrFrInSystem.mk
--include external/svox/pico/lang/PicoLangItItInSystem.mk
diff --git a/target/board/generic_mips/Android.mk b/target/product/sdk_arm64.mk
similarity index 73%
rename from target/board/generic_mips/Android.mk
rename to target/product/sdk_arm64.mk
index abf8d57..8bb38f4 100644
--- a/target/board/generic_mips/Android.mk
+++ b/target/product/sdk_arm64.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2014 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,4 +14,8 @@
 # limitations under the License.
 #
 
-LOCAL_PATH := $(call my-dir)
+# Don't modify this file - It's just an alias!
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_arm64.mk)
+
+PRODUCT_NAME := sdk_arm64
diff --git a/target/product/sdk_base.mk b/target/product/sdk_base.mk
new file mode 100644
index 0000000..8610169
--- /dev/null
+++ b/target/product/sdk_base.mk
@@ -0,0 +1,162 @@
+#
+# Copyright (C) 2007 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PRODUCT_PROPERTY_OVERRIDES :=
+
+PRODUCT_PACKAGES := \
+	ApiDemos \
+	CubeLiveWallpapers \
+	CustomLocale \
+	Development \
+	DevelopmentSettings \
+	Dialer \
+	EmulatorSmokeTests \
+	Fallback \
+	Gallery \
+	GestureBuilder \
+	LegacyCamera \
+	librs_jni \
+	libwnndict \
+	libWnnEngDic \
+	libWnnJpnDic \
+	LiveWallpapersPicker \
+	Mms \
+	Music \
+	OpenWnn \
+	Protips \
+	rild \
+	SdkSetup \
+	SmokeTest \
+	SmokeTestApp \
+	SoftKeyboard \
+	sqlite3 \
+	SystemUI \
+	WidgetPreview
+
+# Define the host tools and libs that are parts of the SDK.
+-include sdk/build/product_sdk.mk
+-include development/build/product_sdk.mk
+
+# audio libraries.
+PRODUCT_PACKAGES += \
+	audio.primary.goldfish \
+	audio_policy.default \
+	local_time.default
+
+PRODUCT_PACKAGE_OVERLAYS := development/sdk_overlay
+
+PRODUCT_COPY_FILES := \
+	device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
+	frameworks/base/data/sounds/effects/camera_click.ogg:system/media/audio/ui/camera_click.ogg \
+	frameworks/base/data/sounds/effects/VideoRecord.ogg:system/media/audio/ui/VideoRecord.ogg \
+	frameworks/native/data/etc/handheld_core_hardware.xml:system/etc/permissions/handheld_core_hardware.xml \
+	device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+	frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+	frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+	frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
+	device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
+	frameworks/native/data/etc/android.hardware.touchscreen.multitouch.jazzhand.xml:system/etc/permissions/android.hardware.touchscreen.multitouch.jazzhand.xml \
+	frameworks/native/data/etc/android.hardware.camera.autofocus.xml:system/etc/permissions/android.hardware.camera.autofocus.xml \
+	frameworks/av/media/libeffects/data/audio_effects.conf:system/etc/audio_effects.conf \
+	hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
+
+include $(SRC_TARGET_DIR)/product/emulator.mk
+
+$(call inherit-product-if-exists, frameworks/base/data/sounds/AllAudio.mk)
+$(call inherit-product-if-exists, frameworks/base/data/fonts/fonts.mk)
+$(call inherit-product-if-exists, external/google-fonts/dancing-script/fonts.mk)
+$(call inherit-product-if-exists, external/google-fonts/carrois-gothic-sc/fonts.mk)
+$(call inherit-product-if-exists, external/google-fonts/coming-soon/fonts.mk)
+$(call inherit-product-if-exists, external/google-fonts/cutive-mono/fonts.mk)
+$(call inherit-product-if-exists, external/noto-fonts/fonts.mk)
+$(call inherit-product-if-exists, external/naver-fonts/fonts.mk)
+$(call inherit-product-if-exists, frameworks/base/data/keyboards/keyboards.mk)
+$(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core.mk)
+
+# include available languages for TTS in the system image
+-include external/svox/pico/lang/PicoLangDeDeInSystem.mk
+-include external/svox/pico/lang/PicoLangEnGBInSystem.mk
+-include external/svox/pico/lang/PicoLangEnUsInSystem.mk
+-include external/svox/pico/lang/PicoLangEsEsInSystem.mk
+-include external/svox/pico/lang/PicoLangFrFrInSystem.mk
+-include external/svox/pico/lang/PicoLangItItInSystem.mk
+
+# locale + densities. en_US is both first and in alphabetical order to
+# ensure this is the default locale.
+PRODUCT_LOCALES := \
+	en_US \
+	ldpi \
+	hdpi \
+	mdpi \
+	xhdpi \
+	ar_EG \
+	ar_IL \
+	bg_BG \
+	ca_ES \
+	cs_CZ \
+	da_DK \
+	de_AT \
+	de_CH \
+	de_DE \
+	de_LI \
+	el_GR \
+	en_AU \
+	en_CA \
+	en_GB \
+	en_IE \
+	en_IN \
+	en_NZ \
+	en_SG \
+	en_US \
+	en_ZA \
+	es_ES \
+	es_US \
+	fi_FI \
+	fr_BE \
+	fr_CA \
+	fr_CH \
+	fr_FR \
+	he_IL \
+	hi_IN \
+	hr_HR \
+	hu_HU \
+	id_ID \
+	it_CH \
+	it_IT \
+	ja_JP \
+	ko_KR \
+	lt_LT \
+	lv_LV \
+	nb_NO \
+	nl_BE \
+	nl_NL \
+	pl_PL \
+	pt_BR \
+	pt_PT \
+	ro_RO \
+	ru_RU \
+	sk_SK \
+	sl_SI \
+	sr_RS \
+	sv_SE \
+	th_TH \
+	tl_PH \
+	tr_TR \
+	uk_UA \
+	vi_VN \
+	zh_CN \
+	zh_TW
diff --git a/target/product/sdk_mips.mk b/target/product/sdk_mips.mk
index 2072400..366994a 100644
--- a/target/product/sdk_mips.mk
+++ b/target/product/sdk_mips.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2012 The Android Open Source Project
+# Copyright (C) 2014 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,15 +14,8 @@
 # limitations under the License.
 #
 
-# This is a build configuration for a full-featured build of the
-# Open-Source part of the tree. It's geared toward a US-centric
-# build quite specifically for the emulator, and might not be
-# entirely appropriate to inherit from for on-device configurations.
+# Don't modify this file - It's just an alias!
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_mips.mk)
 
-# Overrides
-PRODUCT_BRAND := generic_mips
 PRODUCT_NAME := sdk_mips
-PRODUCT_DEVICE := generic_mips
-PRODUCT_MODEL := Android SDK for Mips
diff --git a/target/product/vbox_x86.mk b/target/product/sdk_phone_arm64.mk
similarity index 66%
rename from target/product/vbox_x86.mk
rename to target/product/sdk_phone_arm64.mk
index a7d1b65..c501f14 100644
--- a/target/product/vbox_x86.mk
+++ b/target/product/sdk_phone_arm64.mk
@@ -18,19 +18,12 @@
 # Open-Source part of the tree. It's geared toward a US-centric
 # build quite specifically for the emulator, and might not be
 # entirely appropriate to inherit from for on-device configurations.
-ifdef NET_ETH0_STARTONBOOT
-  PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
-endif
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/vbox_x86/device.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
-PRODUCT_PACKAGES += \
-       camera.vbox_x86 \
-       lights.vbox_x86 \
-       gps.vbox_x86 \
-       sensors.vbox_x86
-
-PRODUCT_NAME := vbox_x86
-PRODUCT_DEVICE := vbox_x86
-PRODUCT_MODEL := Full Android on x86 VirtualBox
+# Overrides
+PRODUCT_BRAND := generic_arm64
+PRODUCT_NAME := sdk_phone_arm64
+PRODUCT_DEVICE := generic_arm64
+PRODUCT_MODEL := Android SDK built for arm64
diff --git a/target/board/generic_mips/Android.mk b/target/product/sdk_phone_armv7.mk
similarity index 71%
copy from target/board/generic_mips/Android.mk
copy to target/product/sdk_phone_armv7.mk
index abf8d57..aeb4940 100644
--- a/target/board/generic_mips/Android.mk
+++ b/target/product/sdk_phone_armv7.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2007 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,4 +14,9 @@
 # limitations under the License.
 #
 
-LOCAL_PATH := $(call my-dir)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
+
+# Overrides
+PRODUCT_BRAND := generic
+PRODUCT_NAME := sdk_phone_armv7
+PRODUCT_DEVICE := generic
diff --git a/target/product/vbox_x86.mk b/target/product/sdk_phone_mips.mk
similarity index 62%
copy from target/product/vbox_x86.mk
copy to target/product/sdk_phone_mips.mk
index a7d1b65..818491f 100644
--- a/target/product/vbox_x86.mk
+++ b/target/product/sdk_phone_mips.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2009 The Android Open Source Project
+# Copyright (C) 2012 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -18,19 +18,11 @@
 # Open-Source part of the tree. It's geared toward a US-centric
 # build quite specifically for the emulator, and might not be
 # entirely appropriate to inherit from for on-device configurations.
-ifdef NET_ETH0_STARTONBOOT
-  PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
-endif
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/vbox_x86/device.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
-PRODUCT_PACKAGES += \
-       camera.vbox_x86 \
-       lights.vbox_x86 \
-       gps.vbox_x86 \
-       sensors.vbox_x86
-
-PRODUCT_NAME := vbox_x86
-PRODUCT_DEVICE := vbox_x86
-PRODUCT_MODEL := Full Android on x86 VirtualBox
+# Overrides
+PRODUCT_BRAND := generic_mips
+PRODUCT_NAME := sdk_phone_mips
+PRODUCT_DEVICE := generic_mips
+PRODUCT_MODEL := Android SDK for Mips
diff --git a/target/product/vbox_x86.mk b/target/product/sdk_phone_mips64.mk
similarity index 66%
copy from target/product/vbox_x86.mk
copy to target/product/sdk_phone_mips64.mk
index a7d1b65..afdb2a8 100644
--- a/target/product/vbox_x86.mk
+++ b/target/product/sdk_phone_mips64.mk
@@ -18,19 +18,12 @@
 # Open-Source part of the tree. It's geared toward a US-centric
 # build quite specifically for the emulator, and might not be
 # entirely appropriate to inherit from for on-device configurations.
-ifdef NET_ETH0_STARTONBOOT
-  PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
-endif
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/vbox_x86/device.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
-PRODUCT_PACKAGES += \
-       camera.vbox_x86 \
-       lights.vbox_x86 \
-       gps.vbox_x86 \
-       sensors.vbox_x86
-
-PRODUCT_NAME := vbox_x86
-PRODUCT_DEVICE := vbox_x86
-PRODUCT_MODEL := Full Android on x86 VirtualBox
+# Overrides
+PRODUCT_BRAND := generic_mips64
+PRODUCT_NAME := sdk_phone_mips64
+PRODUCT_DEVICE := generic_mips64
+PRODUCT_MODEL := Android SDK built for mips64
diff --git a/target/product/vbox_x86.mk b/target/product/sdk_phone_x86.mk
similarity index 66%
copy from target/product/vbox_x86.mk
copy to target/product/sdk_phone_x86.mk
index a7d1b65..95c49ab 100644
--- a/target/product/vbox_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -18,19 +18,11 @@
 # Open-Source part of the tree. It's geared toward a US-centric
 # build quite specifically for the emulator, and might not be
 # entirely appropriate to inherit from for on-device configurations.
-ifdef NET_ETH0_STARTONBOOT
-  PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
-endif
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/vbox_x86/device.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
-PRODUCT_PACKAGES += \
-       camera.vbox_x86 \
-       lights.vbox_x86 \
-       gps.vbox_x86 \
-       sensors.vbox_x86
-
-PRODUCT_NAME := vbox_x86
-PRODUCT_DEVICE := vbox_x86
-PRODUCT_MODEL := Full Android on x86 VirtualBox
+# Overrides
+PRODUCT_BRAND := generic_x86
+PRODUCT_NAME := sdk_phone_x86
+PRODUCT_DEVICE := generic_x86
+PRODUCT_MODEL := Android SDK built for x86
diff --git a/target/product/vbox_x86.mk b/target/product/sdk_phone_x86_64.mk
similarity index 66%
copy from target/product/vbox_x86.mk
copy to target/product/sdk_phone_x86_64.mk
index a7d1b65..69e37af 100644
--- a/target/product/vbox_x86.mk
+++ b/target/product/sdk_phone_x86_64.mk
@@ -18,19 +18,12 @@
 # Open-Source part of the tree. It's geared toward a US-centric
 # build quite specifically for the emulator, and might not be
 # entirely appropriate to inherit from for on-device configurations.
-ifdef NET_ETH0_STARTONBOOT
-  PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
-endif
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/vbox_x86/device.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
-PRODUCT_PACKAGES += \
-       camera.vbox_x86 \
-       lights.vbox_x86 \
-       gps.vbox_x86 \
-       sensors.vbox_x86
-
-PRODUCT_NAME := vbox_x86
-PRODUCT_DEVICE := vbox_x86
-PRODUCT_MODEL := Full Android on x86 VirtualBox
+# Overrides
+PRODUCT_BRAND := generic_x86_64
+PRODUCT_NAME := sdk_phone_x86_64
+PRODUCT_DEVICE := generic_x86_64
+PRODUCT_MODEL := Android SDK built for x86_64
diff --git a/target/product/sdk_x86.mk b/target/product/sdk_x86.mk
index 873d0c0..13ee57d 100644
--- a/target/product/sdk_x86.mk
+++ b/target/product/sdk_x86.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2009 The Android Open Source Project
+# Copyright (C) 2014 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,15 +14,8 @@
 # limitations under the License.
 #
 
-# This is a build configuration for a full-featured build of the
-# Open-Source part of the tree. It's geared toward a US-centric
-# build quite specifically for the emulator, and might not be
-# entirely appropriate to inherit from for on-device configurations.
+# Don't modify this file - It's just an alias!
 
-include $(SRC_TARGET_DIR)/product/sdk.mk
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_x86.mk)
 
-# Overrides
-PRODUCT_BRAND := generic_x86
 PRODUCT_NAME := sdk_x86
-PRODUCT_DEVICE := generic_x86
-PRODUCT_MODEL := Android SDK built for x86
diff --git a/target/board/generic_mips/Android.mk b/target/product/sdk_x86_64.mk
similarity index 72%
copy from target/board/generic_mips/Android.mk
copy to target/product/sdk_x86_64.mk
index abf8d57..5f6553e 100644
--- a/target/board/generic_mips/Android.mk
+++ b/target/product/sdk_x86_64.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2014 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,4 +14,8 @@
 # limitations under the License.
 #
 
-LOCAL_PATH := $(call my-dir)
+# Don't modify this file - It's just an alias!
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_x86_64.mk)
+
+PRODUCT_NAME := sdk_x86_64
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
new file mode 100644
index 0000000..5a40397
--- /dev/null
+++ b/target/product/security/Android.mk
@@ -0,0 +1,12 @@
+LOCAL_PATH:= $(call my-dir)
+
+#######################################
+# verity_key
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := verity_key
+LOCAL_SRC_FILES := $(LOCAL_MODULE)
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
+
+include $(BUILD_PREBUILT)
diff --git a/target/product/security/verity_key b/target/product/security/verity_key
new file mode 100644
index 0000000..8db965f
--- /dev/null
+++ b/target/product/security/verity_key
Binary files differ
diff --git a/target/product/security/verity_private_dev_key b/target/product/security/verity_private_dev_key
new file mode 100644
index 0000000..92528e9
--- /dev/null
+++ b/target/product/security/verity_private_dev_key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDQxdVrH2RB1eg5
+17/gBmLzW1Ds10RG6ctNZMhxppMOLnEZViKGv1VNRhxqK/JKTv2UujgZ94SJcDub
+G+DwAwaGZKQqDYUa0VU2cng8TYPcnYGPdJ7Usckp6tdg64vns7e+VVf0dOyEovR+
+JyeYUz05OhUMYP9xJIhpA2XnXe5Ekb9iTFSYo9uBpoXDD4IY7aOqUxSbv9wMtyIp
+dl+oTm0+kqRRi4KoxGHV0CzDseEUuWG/Kp/7aVF9Sg45NcC6KYvrGysUKA+Bt09O
+feDn/HRpT9SfRElJa5DRms33UBUtnom15F4yd4vvFgubB0nkPOUuwfZhTFfgeuY4
+H2bHkjKbAgMBAAECggEAMpFYqkPGQvQO9cO+ZALoAM4Dgfp6PTrv1WUt7+lLAUpa
+dqqYXk8F2Fu9EjJm03ziix237QI5Bhk7Nsy/5SK2d+L0qILx1JcTrsZ3PRQBdnRo
+J1k2B4qwkQii9oTXNF4hiWaekUWo7E+ULOJLAuhWkf/xjTgJZ1xT9iuuiSYFSnIa
+9ABNH0vCaKEkW/4ri6fdtXmO26C/ltJlnozl86x07PIFh4uBas7/40E8ykFP00CS
+zdhMh+2DGyCb1Q0eJ1IfGILNatkLNEd2BHgQ7qNBkN9yShZfhvIPblr5gSUlZplX
+diV20ZGLAfByKWgZZWKkwl9KzaisL/J/4dr2UlSVEQKBgQDxAYTsgoTkkP0TKzr3
+i3ljT8OuVOj6TwZVBJYe2MIJ3veivS3gWB53FpsKthbib7y8ifIakn15mQkNCK5R
+7H7F5lvZCNnB6shY5Dz7nLJxKLALcAg+d12l3gTbFQeFDs0iQQJF7P8hs/GPF7kY
+Layb7EF0uzYjyHJCKtFdaZaeZwKBgQDdwvCb7NJVeGTcE97etL+8acu9y4GlqKEF
+o0Vkw8TjNKj/KuDkbkAk9hXxU1ZCmDU3y6r8CVHYl0Sqh08plEhkYB/j3sFy81zY
+3xu/rLFysBwjeJHHlPjRTYkdKr9pABmm8NIEShvu9u8i+mpOhjbX72HxZL+i4Fou
+gz58wEdBrQKBgG8CfyKdn+7UJe3tbLTXRquK8xxauhGJ0uXYPfmpZ/8596C7OOVs
+UWQTQoj1hKb6RtolRCIfNbKL3hJl3D2aDG7Fg6r9m6fpqCzhvIE9FShwUF6EVRfI
+zZb4JA5xqkwMnEpZ3V0uI/p3Mx3xFG3ho+8SLLhC/1YOHysBI/y+BQWjAoGAYiqQ
+PkXYWhOAeleleeqDUdF3al3y1zVNimRbLJ7owjcmdEYz5YrUhEgXMIvWjIY6UKes
+2gL6IynbMK3TIjHM1fojQ8jw04TdXfdtnizBJGbHHgCab8IHXwe2oZ2xu7ZapKbI
+ITP5J5BSDabSdk49attB/Qy/NEeiRCK+/5RSNsUCgYAg6vX9VqMEkhPHeoFfdLGD
+EQPPN6QLrQ4Zif0GKxH96znNSv0rXdNp9t0kyapdgzMuCwIEuOkCSiKgmfjTWnYO
+qh5HMUuD2VbfWwI9jVujQMRmqiaFF7VxxA1bP5j1hJlI6cn1Fjlpi+NsNZN4nm3Q
+92SEwX2vDgjrU0NAtFFL1Q==
+-----END PRIVATE KEY-----
diff --git a/target/board/generic_mips/Android.mk b/target/product/verity.mk
similarity index 67%
copy from target/board/generic_mips/Android.mk
copy to target/product/verity.mk
index abf8d57..4a1ca5e 100644
--- a/target/board/generic_mips/Android.mk
+++ b/target/product/verity.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2011 The Android Open Source Project
+# Copyright (C) 2014 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,4 +14,10 @@
 # limitations under the License.
 #
 
-LOCAL_PATH := $(call my-dir)
+# Provides dependencies necessary for verified boot
+
+PRODUCT_SUPPORTS_VERITY := true
+PRODUCT_VERITY_SIGNING_KEY := build/target/product/security/verity_private_dev_key
+
+PRODUCT_PACKAGES += \
+        verity_key
diff --git a/tools/atree/atree.cpp b/tools/atree/atree.cpp
index 2ba284f..b134e01 100644
--- a/tools/atree/atree.cpp
+++ b/tools/atree/atree.cpp
@@ -90,6 +90,26 @@
     }
 }
 
+// Escape the filename so that it can be added to the makefile properly.
+static string
+escape_filename(const string name)
+{
+    ostringstream new_name;
+    for (string::const_iterator iter = name.begin(); iter != name.end(); ++iter)
+    {
+        switch (*iter)
+        {
+            case '$':
+                new_name << "$$";
+                break;
+            default:
+                new_name << *iter;
+                break;
+        }
+    }
+    return new_name.str();
+}
+
 int
 main(int argc, char* const* argv)
 {
@@ -324,7 +344,8 @@
             for (vector<FileRecord>::iterator it=files.begin();
                                 it!=files.end(); it++) {
                 if (!it->sourceIsDir) {
-                    fprintf(f, "%s \\\n", it->sourcePath.c_str());
+                    fprintf(f, "%s \\\n",
+                            escape_filename(it->sourcePath).c_str());
                 }
             }
             fprintf(f, "\n");
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 46a73f8..ed6bd87 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -8,6 +8,7 @@
 echo "ro.build.version.incremental=$BUILD_NUMBER"
 echo "ro.build.version.sdk=$PLATFORM_SDK_VERSION"
 echo "ro.build.version.codename=$PLATFORM_VERSION_CODENAME"
+echo "ro.build.version.all_codenames=$PLATFORM_VERSION_ALL_CODENAMES"
 echo "ro.build.version.release=$PLATFORM_VERSION"
 echo "ro.build.date=`date`"
 echo "ro.build.date.utc=`date +%s`"
@@ -46,9 +47,12 @@
 echo "# ro.build.product is obsolete; use ro.product.device"
 echo "ro.build.product=$TARGET_DEVICE"
 
-echo "# Do not try to parse ro.build.description or .fingerprint"
+echo "# Do not try to parse description, fingerprint, or thumbprint"
 echo "ro.build.description=$PRIVATE_BUILD_DESC"
 echo "ro.build.fingerprint=$BUILD_FINGERPRINT"
+if [ -n "$BUILD_THUMBPRINT" ] ; then
+  echo "ro.build.thumbprint=$BUILD_THUMBPRINT"
+fi
 echo "ro.build.characteristics=$TARGET_AAPT_CHARACTERISTICS"
 
 echo "# end build properties"
diff --git a/tools/droiddoc/templates-ds/package.cs b/tools/droiddoc/templates-ds/package.cs
index ea3e4f4..d67d5d9 100644
--- a/tools/droiddoc/templates-ds/package.cs
+++ b/tools/droiddoc/templates-ds/package.cs
@@ -45,6 +45,7 @@
   <?cs /if ?>
 <?cs /def ?>
 
+<?cs call:class_table("Annotations", package.annotations) ?>
 <?cs call:class_table("Interfaces", package.interfaces) ?>
 <?cs call:class_table("Classes", package.classes) ?>
 <?cs call:class_table("Enums", package.enums) ?>
diff --git a/tools/droiddoc/templates-pdk/assets/images/android-partner-logo.png b/tools/droiddoc/templates-pdk/assets/images/android-partner-logo.png
new file mode 100644
index 0000000..dac0991
--- /dev/null
+++ b/tools/droiddoc/templates-pdk/assets/images/android-partner-logo.png
Binary files differ
diff --git a/tools/droiddoc/templates-pdk/components/masthead.cs b/tools/droiddoc/templates-pdk/components/masthead.cs
index a581618..05437f3 100644
--- a/tools/droiddoc/templates-pdk/components/masthead.cs
+++ b/tools/droiddoc/templates-pdk/components/masthead.cs
@@ -2,8 +2,8 @@
 def:custom_masthead() ?>
   <div id="header">
       <div id="headerLeft">
-          <a href="<?cs var:toroot ?>guide/getting_started.html"><img
-              src="<?cs var:toroot ?>assets/images/open_source.png" alt="Android Open Source Project" /></a>
+          <a href="<?cs var:toroot ?>guide/index.html"><img
+              src="<?cs var:toroot ?>assets/images/android-partner-logo.png" alt="Android Platform Development Kit" /></a>
        
       </div>
       <div id="headerRight">
diff --git a/tools/droiddoc/templates-sac/assets/css/default.css b/tools/droiddoc/templates-sac/assets/css/default.css
index 0d9e601..c1a0c19 100644
--- a/tools/droiddoc/templates-sac/assets/css/default.css
+++ b/tools/droiddoc/templates-sac/assets/css/default.css
@@ -1016,12 +1016,13 @@
 }
 h1 {
     color:#333;
-    font-size: 22px;
-    margin: 20px 0 20px;
+    font-size: 34px;
+    margin: 36px 0 27px;
     padding:0 0 10px;
+    font-weight:300;
 }
 h1, h2 {
-    line-height: 32px;
+    line-height: 30px;
 }
 h1.short {
   margin-right:320px;
@@ -1030,35 +1031,41 @@
   margin-right:320px;
 }
 h1.super {
-    font-size: 37px;  
+    font-size: 37px;
 }
 h2 {
     color:#333;
-    font-size: 20px;
-    margin: 20px 0 20px;
+    font-size: 26px;
+    margin: 32px 0 20px;
     padding:0;
+    font-weight:300;
 }
 h3 {
     color:#333;
-    font-size: 18px;
+    font-size: 21px;
+    font-weight:400;
+    margin:21px 0 14px 0;
 }
 h3, h4 {
-    color:#333;
-    line-height: 20px;
-    margin: 10px 0;
+    line-height: 21px;
 }
 h4 {
-  font-size: 16px;
+  font-size: 18px;
+  margin: 12px 0;
+  font-weight:500;
 }
 h5 {
-  font-size: 14px;  
+  font-size: 14px;
 }
 h5, h6 {
   margin: 5px 0;
 }
 h6 {
-  font-size: 12px;  
+  font-size: 12px;
 }
+
+
+
 hr { /* applied to the bottom of h2 elements */
   height: 1px;
   margin: 5px 0 20px;
diff --git a/tools/droiddoc/templates-sac/assets/images/sac_logo.png b/tools/droiddoc/templates-sac/assets/images/sac_logo.png
index 54b9a4c..4ad113c 100644
--- a/tools/droiddoc/templates-sac/assets/images/sac_logo.png
+++ b/tools/droiddoc/templates-sac/assets/images/sac_logo.png
Binary files differ
diff --git a/tools/droiddoc/templates-sac/assets/images/sac_logo@2x.png b/tools/droiddoc/templates-sac/assets/images/sac_logo@2x.png
new file mode 100644
index 0000000..4040f3f
--- /dev/null
+++ b/tools/droiddoc/templates-sac/assets/images/sac_logo@2x.png
Binary files differ
diff --git a/tools/droiddoc/templates-sac/components/masthead.cs b/tools/droiddoc/templates-sac/components/masthead.cs
index fb9b71d..a8618c0 100644
--- a/tools/droiddoc/templates-sac/components/masthead.cs
+++ b/tools/droiddoc/templates-sac/components/masthead.cs
@@ -253,7 +253,9 @@
         <div class="wrap" id="header-wrap">
           <div class="col-3 saclogo">
           <a href="<?cs var:toroot ?>index.html">
-            <img src="<?cs var:toroot ?>assets/images/sac_logo.png" width="114" height="16" alt="Android Developers" />
+            <img src="<?cs var:toroot ?>assets/images/sac_logo.png"
+                srcset="<?cs var:toroot ?>assets/images/sac_logo@2x.png 2x"
+                width="123" height="25" alt="Android Developers" />
           </a>
           </div>
             <ul class="nav-x col-9">
diff --git a/tools/droiddoc/templates-sac/package.cs b/tools/droiddoc/templates-sac/package.cs
index 99eaff2..abd49f1 100644
--- a/tools/droiddoc/templates-sac/package.cs
+++ b/tools/droiddoc/templates-sac/package.cs
@@ -45,6 +45,7 @@
   <?cs /if ?>
 <?cs /def ?>
 
+<?cs call:class_table("Annotations", package.annotations) ?>
 <?cs call:class_table("Interfaces", package.interfaces) ?>
 <?cs call:class_table("Classes", package.classes) ?>
 <?cs call:class_table("Enums", package.enums) ?>
diff --git a/tools/droiddoc/templates-sdk/assets/css/default.css b/tools/droiddoc/templates-sdk/assets/css/default.css
index dc6166a..96259a7 100644
--- a/tools/droiddoc/templates-sdk/assets/css/default.css
+++ b/tools/droiddoc/templates-sdk/assets/css/default.css
@@ -1246,7 +1246,7 @@
 legend {
     display: none;
 }
-a:link, a:visited {
+a:link, a:visited, .link-color {
   color: #258aaf;
   text-decoration: none;
 }
@@ -4206,7 +4206,7 @@
   z-index: 52;
 }
 
-/* offset the <a name=""> tags in reference to account for sticky nav */
+/* offset the <a name=""> tags to account for sticky nav */
 body.reference a[name] {
   visibility: hidden;
   display: block;
@@ -7341,3 +7341,52 @@
   line-height: 54px;
   text-align: center;
 }
+
+.annotation-message {
+    display: block;
+    font-style: italic;
+    color: #F80;
+}
+
+
+
+/* Helpouts widget */
+.resource-card-6x2.helpouts-card {
+  width: 220px;
+  height: 40px;
+  position:absolute;
+  z-index:999;
+  top:-8px;
+  right:1px;
+}
+
+.resource-card-6x2.helpouts-card > .card-info {
+  left:35px;
+  height:35px;
+  padding:4px 8px 4px 0;
+}
+
+.resource-card-6x2.helpouts-card > .card-info .helpouts-description {
+  display:block;
+  overflow:visible;
+  font-size:12px;
+  line-height:12px;
+  text-align:right;
+  color:#666;
+}
+
+.helpouts-description .link-color {
+  text-transform: uppercase;
+}
+
+.resource-card-6x2 > .card-bg.helpouts-card-bg {
+  width:35px;
+  height:35px;
+  margin:2px 0 0 0;
+  background-image: url(../images/styles/helpouts-logo-35_2x.png);
+  background-image: -webkit-image-set(url(../images/styles/helpouts-logo-35.png) 1x, url(../images/styles/helpouts-logo-35_2x.png) 2x);
+}
+
+.resource-card-6x2 > .card-bg.helpouts-card-bg:after {
+  display:none;
+}
\ No newline at end of file
diff --git a/tools/droiddoc/templates-sdk/assets/images/styles/helpouts-logo-35.png b/tools/droiddoc/templates-sdk/assets/images/styles/helpouts-logo-35.png
new file mode 100644
index 0000000..3c2dc1a
--- /dev/null
+++ b/tools/droiddoc/templates-sdk/assets/images/styles/helpouts-logo-35.png
Binary files differ
diff --git a/tools/droiddoc/templates-sdk/assets/images/styles/helpouts-logo-35_2x.png b/tools/droiddoc/templates-sdk/assets/images/styles/helpouts-logo-35_2x.png
new file mode 100644
index 0000000..e34be2e
--- /dev/null
+++ b/tools/droiddoc/templates-sdk/assets/images/styles/helpouts-logo-35_2x.png
Binary files differ
diff --git a/tools/droiddoc/templates-sdk/assets/js/docs.js b/tools/droiddoc/templates-sdk/assets/js/docs.js
index 2824a3d..8daef7f 100644
--- a/tools/droiddoc/templates-sdk/assets/js/docs.js
+++ b/tools/droiddoc/templates-sdk/assets/js/docs.js
@@ -900,16 +900,14 @@
   return 0;
 }
 
-function writeCookie(cookie, val, section, expiration) {
+function writeCookie(cookie, val, section, age) {
   if (val==undefined) return;
   section = section == null ? "_" : "_"+section+"_";
-  if (expiration == null) {
-    var date = new Date();
-    date.setTime(date.getTime()+(10*365*24*60*60*1000)); // default expiration is one week
-    expiration = date.toGMTString();
+  if (age == null) {
+    var age = 2*365*24*60*60; // set max-age to 2 years
   }
   var cookieValue = cookie_namespace + section + cookie + "=" + val
-                    + "; expires=" + expiration+"; path=/";
+                    + "; max-age=" + age +"; path=/";
   document.cookie = cookieValue;
 }
 
@@ -1988,7 +1986,7 @@
 
 
         // Search for matching JD docs
-        if (text.length >= 3) {
+        if (text.length >= 2) {
           // Regex to match only the beginning of a word
           var textRegex = new RegExp("\\b" + text.toLowerCase(), "g");
 
@@ -2538,6 +2536,9 @@
 /* Adjust the scroll position to account for sticky header, only if the hash matches an id.
    This does not handle <a name=""> tags. Some CSS fixes those, but only for reference docs. */
 function offsetScrollForSticky() {
+  // Ignore if there's no search bar (some special pages have no header)
+  if ($("#search-container").length < 1) return;
+
   var hash = escape(location.hash.substr(1));
   var $matchingElement = $("#"+hash);
   // Sanity check that there's an element with that ID on the page
@@ -2552,6 +2553,9 @@
 
 // when an event on the browser history occurs (back, forward, load) requery hash and do search
 $(window).hashchange( function(){
+  // Ignore if there's no search bar (some special pages have no header)
+  if ($("#search-container").length < 1) return;
+
   // If the hash isn't a search query or there's an error in the query,
   // then adjust the scroll position to account for sticky header, then exit.
   if ((location.hash.indexOf("q=") == -1) || (query == "undefined")) {
diff --git a/tools/droiddoc/templates-sdk/class.cs b/tools/droiddoc/templates-sdk/class.cs
index b6ec19f..7aa99f9 100644
--- a/tools/droiddoc/templates-sdk/class.cs
+++ b/tools/droiddoc/templates-sdk/class.cs
@@ -1,5 +1,6 @@
 <?cs include:"doctype.cs" ?>
 <?cs include:"macros.cs" ?>
+<?cs include:"macros_override.cs" ?>
 <html<?cs if:devsite ?> devsite<?cs /if ?>>
 <?cs include:"head_tag.cs" ?>
 <body class="gc-documentation <?cs if:(reference.gms || reference.gcm) ?>google<?cs /if ?>
@@ -125,6 +126,7 @@
   <?cs /if ?>
   <?cs set:colspan = colspan-1 ?>
 <?cs /each ?>
+<?cs call:show_annotations_list(class) ?>
 
 </div><!-- end header -->
 
@@ -195,7 +197,10 @@
         <td class="jd-linkcol" width="100%"><nobr>
         <span class="sympad"><?cs call:cond_link(method.name, toroot, method.href, included) ?></span>(<?cs call:parameter_list(method.params) ?>)</nobr>
         <?cs if:subcount(method.shortDescr) || subcount(method.deprecated) ?>
-        <div class="jd-descrdiv"><?cs call:short_descr(method) ?></div>
+        <div class="jd-descrdiv">
+          <?cs call:short_descr(method) ?>
+          <?cs call:show_annotations_list(method) ?>
+        </div>
   <?cs /if ?>
   </td></tr>
 <?cs set:count = count + #1 ?>
@@ -212,7 +217,10 @@
           <?cs var:field.final ?>
           <?cs call:type_link(field.type) ?></nobr></td>
           <td class="jd-linkcol"><?cs call:cond_link(field.name, toroot, field.href, included) ?></td>
-          <td class="jd-descrcol" width="100%"><?cs call:short_descr(field) ?></td>
+          <td class="jd-descrcol" width="100%">
+            <?cs call:short_descr(field) ?>
+            <?cs call:show_annotations_list(field) ?>
+          </td>
       </tr>
       <?cs set:count = count + #1 ?>
     <?cs /each ?>
@@ -224,7 +232,10 @@
     <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:field.since ?>" >
         <td class="jd-typecol"><?cs call:type_link(field.type) ?></td>
         <td class="jd-linkcol"><?cs call:cond_link(field.name, toroot, field.href, included) ?></td>
-        <td class="jd-descrcol" width="100%"><?cs call:short_descr(field) ?></td>
+        <td class="jd-descrcol" width="100%">
+          <?cs call:short_descr(field) ?>
+          <?cs call:show_annotations_list(field) ?>
+        </td>
     </tr>
     <?cs set:count = count + #1 ?>
     <?cs /each ?>
@@ -244,7 +255,10 @@
             <?cs call:cond_link(m.name, toroot, m.href, included) ?>
             <?cs /each ?>
         </td>
-        <td class="jd-descrcol" width="100%"><?cs call:short_descr(attr) ?>&nbsp;</td>
+        <td class="jd-descrcol" width="100%">
+          <?cs call:short_descr(attr) ?>&nbsp;
+          <?cs call:show_annotations_list(attr) ?>
+        </td>
     </tr>
     <?cs set:count = count + #1 ?>
     <?cs /each ?>
@@ -261,7 +275,10 @@
         <?cs var:cl.abstract ?>
         <?cs var:cl.kind ?></nobr></td>
       <td class="jd-linkcol"><?cs call:type_link(cl.type) ?></td>
-      <td class="jd-descrcol" width="100%"><?cs call:short_descr(cl) ?>&nbsp;</td>
+      <td class="jd-descrcol" width="100%">
+        <?cs call:short_descr(cl) ?>&nbsp;
+        <?cs call:show_annotations_list(cl) ?>
+      </td>
     </tr>
     <?cs set:count = count + #1 ?>
     <?cs /each ?>
@@ -337,7 +354,10 @@
     <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:field.since ?>" >
         <td class="jd-descrcol"><?cs call:type_link(field.type) ?>&nbsp;</td>
         <td class="jd-linkcol"><?cs call:cond_link(field.name, toroot, field.href, cl.included) ?>&nbsp;</td>
-        <td class="jd-descrcol" width="100%"><?cs call:short_descr(field) ?>&nbsp;</td>
+        <td class="jd-descrcol" width="100%">
+          <?cs call:short_descr(field) ?>&nbsp;
+          <?cs call:show_annotations_list(field) ?>
+        </td>
     </tr>
     <?cs set:count = count + #1 ?>
     <?cs /each ?>
@@ -506,6 +526,7 @@
         <?cs call:federated_refs(field) ?>
       </div>
     <div class="jd-details-descr">
+      <?cs call:show_annotations_list(field) ?>
       <?cs call:description(field) ?>
     <?cs if:subcount(field.constantValue) ?>
         <div class="jd-tagdata">
@@ -548,6 +569,7 @@
         <?cs call:federated_refs(method) ?>
       </div>
     <div class="jd-details-descr">
+      <?cs call:show_annotations_list(method) ?>
       <?cs call:description(method) ?>
     </div>
 </div>
@@ -562,6 +584,7 @@
     <h4 class="jd-details-title"><?cs var:attr.name ?>
     </h4>
     <div class="jd-details-descr">
+        <?cs call:show_annotations_list(attr) ?>
         <?cs call:description(attr) ?>
 
         <div class="jd-tagdata">
diff --git a/tools/droiddoc/templates-sdk/classes.cs b/tools/droiddoc/templates-sdk/classes.cs
index 06592d4..405892d 100644
--- a/tools/droiddoc/templates-sdk/classes.cs
+++ b/tools/droiddoc/templates-sdk/classes.cs
@@ -1,5 +1,6 @@
 <?cs include:"doctype.cs" ?>
 <?cs include:"macros.cs" ?>
+<?cs include:"macros_override.cs" ?>
 <html<?cs if:devsite ?> devsite<?cs /if ?>>
 <?cs include:"head_tag.cs" ?>
 <body class="gc-documentation <?cs if:(reference.gms || reference.gcm) ?>google<?cs /if ?>
@@ -32,7 +33,10 @@
     <?cs each:cl = letter ?>
         <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:cl.since ?>" >
             <td class="jd-linkcol"><?cs call:type_link(cl.type) ?></td>
-            <td class="jd-descrcol" width="100%"><?cs call:short_descr(cl) ?>&nbsp;</td>
+            <td class="jd-descrcol" width="100%">
+              <?cs call:short_descr(cl) ?>&nbsp;
+              <?cs call:show_annotations_list(cl) ?>
+            </td>
         </tr>
     <?cs set:count = count + #1 ?>
     <?cs /each ?>
diff --git a/tools/droiddoc/templates-sdk/components/masthead.cs b/tools/droiddoc/templates-sdk/components/masthead.cs
index c880eee..2dde104 100644
--- a/tools/droiddoc/templates-sdk/components/masthead.cs
+++ b/tools/droiddoc/templates-sdk/components/masthead.cs
@@ -6,7 +6,7 @@
 <?cs if:!devsite ?><?cs # leave out the global header for devsite; it is in devsite template ?>
   <!-- Header -->
   <div id="header-wrapper">
-    <div id="header">
+    <div id="header"><?cs call:butter_bar() ?>
       <div class="wrap" id="header-wrap">
         <div class="col-3 logo">
           <a href="<?cs var:toroot ?>index.html">
@@ -157,7 +157,36 @@
   <?cs elif:training || guide || reference || tools || develop || google || samples ?>
     <!-- Secondary x-nav -->
     <div id="nav-x">
-        <div class="wrap">
+        <div class="wrap" style="position:relative;z-index:1">
+
+        <?cs if:reference ?>
+            <a id="helpoutsLink" class="resource resource-card resource-card-6x2x3 resource-card-6x2 helpouts-card" 
+              href="http://helpouts.google.com/partner/landing/provider/googledevelopers" target="_blank">
+              <div class="card-bg helpouts-card-bg"></div>
+              <div class="card-info">
+                <div class="helpouts-description">
+                  <div class="text">Help developers solve problems<br/>
+                    <span id="helpoutsLinkText" class="link-color" style="display:block;padding-top:5px;text-align:right">Learn more</span>
+                  </div>
+                </div>
+              </div>
+            </a>
+            <script>
+              var textA = "LEARN MORE";
+              var linkA = "http://helpouts.google.com/partner/landing/provider/googledevelopers?utm_source=android_banner1&utm_medium=banner&utm_campaign=android_provider_banner1";
+              var textB = "SIGN UP NOW";
+              var linkB = "http://helpouts.google.com/partner/landing/provider/googledevelopers?utm_source=android_banner2&utm_medium=banner&utm_campaign=android_provider_banner2";
+
+              if (Math.floor(1/Math.random()) > 1) {
+                $("a#helpoutsLink").attr('href', linkA);
+                $("span#helpoutsLinkText").text(textA);
+              } else {
+                $("a#helpoutsLink").attr('href', linkB);
+                $("span#helpoutsLinkText").text(textB);
+              }
+            </script>
+        <?cs /if ?>
+
             <ul class="nav-x col-9 develop" style="width:100%">
                 <li class="training"><a href="<?cs var:toroot ?>training/index.html"
                   zh-tw-lang="訓練課程"
@@ -261,7 +290,7 @@
 
 <!-- Header -->
 <div id="header-wrapper">
-  <div id="header">
+  <div id="header"><?cs call:butter_bar() ?>
     <div class="wrap" id="header-wrap">
       <div class="col_3 logo landing-logo" style="width:240px">
         <a href="<?cs var:toroot ?>preview/index.html">
@@ -299,3 +328,22 @@
 
   <?cs
 /def ?>
+
+
+<?cs # (UN)COMMENT THE INSIDE OF THIS METHOD TO TOGGLE VISIBILITY ?>
+<?cs def:butter_bar() ?>
+
+<?cs # HIDE THE BUTTER BAR
+
+    <div style="height:20px"><!-- spacer to bump header down --></div>
+    <div id="butterbar-wrapper">
+      <div id="butterbar">
+        <a href="http://googleblog.blogspot.com/" id="butterbar-message">
+          The Android 5.0 SDK will be available on October 17th!
+        </a>
+      </div>
+    </div>
+
+?>    
+
+<?cs /def ?>
\ No newline at end of file
diff --git a/tools/droiddoc/templates-sdk/customizations.cs b/tools/droiddoc/templates-sdk/customizations.cs
index 79cdd89..e0e3ca1 100644
--- a/tools/droiddoc/templates-sdk/customizations.cs
+++ b/tools/droiddoc/templates-sdk/customizations.cs
@@ -353,6 +353,7 @@
 <?cs 
             if:subcount(class.package) ?>
             <ul>
+              <?cs call:list("Annotations", class.package.annotations) ?>
               <?cs call:list("Interfaces", class.package.interfaces) ?>
               <?cs call:list("Classes", class.package.classes) ?>
               <?cs call:list("Enums", class.package.enums) ?>
@@ -361,6 +362,7 @@
             </ul><?cs 
             elif:subcount(package) ?>
             <ul>
+              <?cs call:class_link_list("Annotations", package.annotations) ?>
               <?cs call:class_link_list("Interfaces", package.interfaces) ?>
               <?cs call:class_link_list("Classes", package.classes) ?>
               <?cs call:class_link_list("Enums", package.enums) ?>
diff --git a/tools/droiddoc/templates-sdk/docpage.cs b/tools/droiddoc/templates-sdk/docpage.cs
index 7d872bc..98bc92f 100644
--- a/tools/droiddoc/templates-sdk/docpage.cs
+++ b/tools/droiddoc/templates-sdk/docpage.cs
@@ -194,10 +194,10 @@
 
 <?cs include:"trailer.cs" ?>
   <script src="https://developer.android.com/ytblogger_lists_unified.js" type="text/javascript"></script>
-  <script src="<?cs var:toroot ?>jd_lists_unified.js?v=2" type="text/javascript"></script>
-  <script src="<?cs var:toroot ?>jd_extras.js?v=3" type="text/javascript"></script>
-  <script src="<?cs var:toroot ?>jd_collections.js?v=3" type="text/javascript"></script>
-  <script src="<?cs var:toroot ?>jd_tag_helpers.js?v=2" type="text/javascript"></script>
+  <script src="<?cs var:toroot ?>jd_lists_unified.js?v=4" type="text/javascript"></script>
+  <script src="<?cs var:toroot ?>jd_extras.js?v=5" type="text/javascript"></script>
+  <script src="<?cs var:toroot ?>jd_collections.js?v=5" type="text/javascript"></script>
+  <script src="<?cs var:toroot ?>jd_tag_helpers.js?v=4" type="text/javascript"></script>
 
 </body>
 </html>
diff --git a/tools/droiddoc/templates-sdk/head_tag.cs b/tools/droiddoc/templates-sdk/head_tag.cs
index 7ecb7f9..d54f6e3 100644
--- a/tools/droiddoc/templates-sdk/head_tag.cs
+++ b/tools/droiddoc/templates-sdk/head_tag.cs
@@ -24,8 +24,8 @@
 <meta name="Description" content="<?cs var:page.metaDescription ?>"><?cs
   /if ?>
 <link rel="shortcut icon" type="image/x-icon" href="<?cs var:toroot ?>favicon.ico" />
-<title><?cs 
-  if:page.title ?><?cs 
+<title><?cs
+  if:page.title ?><?cs
     var:page.title ?> | <?cs
   /if ?>Android Developers</title>
 
@@ -64,6 +64,13 @@
 </script>
 <script src="<?cs var:toroot ?>assets/js/docs.js?v=2" type="text/javascript"></script>
 
+<?cs if:helpoutsWidget ?>
+<script type="text/javascript" src="https://helpouts.google.com/ps/res/embed.js" defer async
+    data-helpouts-embed data-helpouts-vertical="programming"
+    data-helpouts-tags="<?cs var:page.tags ?>" data-helpouts-prefix="android"
+    data-helpouts-standalone="true"></script>
+<?cs /if ?>
+
 <script>
   (function(i,s,o,g,r,a,m){i['GoogleAnalyticsObject']=r;i[r]=i[r]||function(){
   (i[r].q=i[r].q||[]).push(arguments)},i[r].l=1*new Date();a=s.createElement(o),
diff --git a/tools/droiddoc/templates-sdk/macros_override.cs b/tools/droiddoc/templates-sdk/macros_override.cs
new file mode 100644
index 0000000..1525be5
--- /dev/null
+++ b/tools/droiddoc/templates-sdk/macros_override.cs
@@ -0,0 +1,36 @@
+<?cs # Create a comma separated list of annotations on obj that were in showAnnotations in Doclava ?>
+<?cs # pre is an HTML string to start the list, post is an HTML string to close the list ?>
+<?cs # for example call:show_annotations_list(cl, "<td>Annotations: ", "</td>") ?>
+<?cs # if obj has nothing on obj.showAnnotations, nothing will be output ?>
+<?cs def:show_annotations_list(obj) ?>
+    <?cs each:anno = obj.showAnnotations ?>
+      <?cs if:first(anno) ?>
+        <span class='annotation-message'>
+          Included in documentation by the annotations:
+      <?cs /if ?>
+      @<?cs var:anno.type.label ?>
+      <?cs if:last(anno) == 0 ?>
+        , &nbsp;
+      <?cs /if ?>
+      <?cs if:last(anno)?>
+        </span>
+      <?cs /if ?>
+    <?cs /each ?>
+<?cs /def ?>
+
+<?cs # Override default class_link_table to display annotations ?>
+<?cs def:class_link_table(classes) ?>
+  <?cs set:count = #1 ?>
+  <table class="jd-sumtable-expando">
+    <?cs each:cl=classes ?>
+      <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:cl.type.since ?>" >
+        <td class="jd-linkcol"><?cs call:type_link(cl.type) ?></td>
+        <td class="jd-descrcol" width="100%">
+          <?cs call:short_descr(cl) ?>&nbsp;
+          <?cs call:show_annotations_list(cl) ?>
+        </td>
+      </tr>
+      <?cs set:count = count + #1 ?>
+    <?cs /each ?>
+  </table>
+<?cs /def ?>
\ No newline at end of file
diff --git a/tools/droiddoc/templates-sdk/package.cs b/tools/droiddoc/templates-sdk/package.cs
index 445e606..2225565 100644
--- a/tools/droiddoc/templates-sdk/package.cs
+++ b/tools/droiddoc/templates-sdk/package.cs
@@ -1,5 +1,6 @@
 <?cs include:"doctype.cs" ?>
 <?cs include:"macros.cs" ?>
+<?cs include:"macros_override.cs" ?>
 <html<?cs if:devsite ?> devsite<?cs /if ?>>
 <?cs include:"head_tag.cs" ?>
 
@@ -47,6 +48,7 @@
   <?cs /if ?>
 <?cs /def ?>
 
+<?cs call:class_table("Annotations", package.annotations) ?>
 <?cs call:class_table("Interfaces", package.interfaces) ?>
 <?cs call:class_table("Classes", package.classes) ?>
 <?cs call:class_table("Enums", package.enums) ?>
diff --git a/tools/droiddoc/templates-sdk/sdkpage.cs b/tools/droiddoc/templates-sdk/sdkpage.cs
index a069b1d..bbe6e97 100644
--- a/tools/droiddoc/templates-sdk/sdkpage.cs
+++ b/tools/droiddoc/templates-sdk/sdkpage.cs
@@ -77,7 +77,7 @@
 
   <table class="download" id="download-table">
     <tr>
-      <th>Platform<br>(32-bit target)</th>
+      <th>Platform</th>
       <th>Package</th>
       <th style="white-space:nowrap">Size (Bytes)</th>
       <th>MD5 Checksum</th>
@@ -184,129 +184,6 @@
     <td><?cs var:ndk.linux64.legacy_checksum ?></td>
   </tr> -->
 
-    <tr>
-      <th>Platform<br>(64-bit target)</th>
-      <th>Package</th>
-      <th style="white-space:nowrap">Size (Bytes)</th>
-      <th>MD5 Checksum</th>
-  </tr>
-  <tr>
-    <td>Windows 32-bit</td>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.win32_64_download ?>"><?cs var:ndk.win32_64_download ?></a>
-    </td>
-    <td><?cs var:ndk.win32_64_bytes ?></td>
-    <td><?cs var:ndk.win32_64_checksum ?></td>
-  </tr>
- <!-- <tr>
-   <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.win32.legacy_download ?>"><?cs var:ndk.win32.legacy_download ?></a>
-    </td>
-    <td><?cs var:ndk.win32.legacy_bytes ?></td>
-    <td><?cs var:ndk.win32.legacy_checksum ?></td>
-  </tr> -->
-  <tr>
-    <td>Windows 64-bit</td>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.win64_64_download ?>"><?cs var:ndk.win64_64_download ?></a>
-    </td>
-    <td><?cs var:ndk.win64_64_bytes ?></td>
-    <td><?cs var:ndk.win64_64_checksum ?></td>
-  </tr>
- <!--  <tr>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.win64.legacy_download ?>"><?cs var:ndk.win64.legacy_download ?></a>
-    </td>
-    <td><?cs var:ndk.win64.legacy_bytes ?></td>
-    <td><?cs var:ndk.win64.legacy_checksum ?></td>
-  </tr> -->
-  <tr>
-    <td>Mac OS X 32-bit</td>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.mac32_64_download ?>"><?cs var:ndk.mac32_64_download ?></a>
-    </td>
-    <td><?cs var:ndk.mac32_64_bytes ?></td>
-    <td><?cs var:ndk.mac32_64_checksum ?></td>
-  </tr>
- <!--  <tr>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.mac32.legacy_download ?>"><?cs var:ndk.mac32.legacy_download ?></a>
-    </td>
-    <td><?cs var:ndk.mac32.legacy_bytes ?></td>
-    <td><?cs var:ndk.mac32.legacy_checksum ?></td>
-  </tr> -->
-    <td>Mac OS X 64-bit</td>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.mac64_64_download ?>"><?cs var:ndk.mac64_64_download ?></a>
-    </td>
-    <td><?cs var:ndk.mac64_64_bytes ?></td>
-    <td><?cs var:ndk.mac64_64_checksum ?></td>
-  </tr>
- <!--  <tr>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.mac64.legacy_download ?>"><?cs var:ndk.mac64.legacy_download ?></a>
-    </td>
-    <td><?cs var:ndk.mac64.legacy_bytes ?></td>
-    <td><?cs var:ndk.mac64.legacy_checksum ?></td>
-  </tr> -->
-  <tr>
-    <td>Linux 32-bit (x86)</td>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.linux32_64_download ?>"><?cs var:ndk.linux32_64_download ?></a>
-    </td>
-    <td><?cs var:ndk.linux32_64_bytes ?></td>
-    <td><?cs var:ndk.linux32_64_checksum ?></td>
-  </tr>
- <!--  <tr>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.linux32.legacy_download ?>"><?cs var:ndk.linux32.legacy_download ?></a>
-    </td>
-    <td><?cs var:ndk.linux32.legacy_bytes ?></td>
-    <td><?cs var:ndk.linux32.legacy_checksum ?></td>
-  </tr> -->
-  <tr>
-    <td>Linux 64-bit (x86)</td>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.linux64_64_download ?>"><?cs var:ndk.linux64_64_download ?></a>
-    </td>
-    <td><?cs var:ndk.linux64_64_bytes ?></td>
-    <td><?cs var:ndk.linux64_64_checksum ?></td>
-  </tr>
-  <!--  <tr>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.linux64.legacy_download ?>"><?cs var:ndk.linux64.legacy_download ?></a>
-    </td>
-    <td><?cs var:ndk.linux64.legacy_bytes ?></td>
-    <td><?cs var:ndk.linux64.legacy_checksum ?></td>
-  </tr> -->
-
-    <tr>
-      <th>Additional Download<br>(32-, 64-bit)</th>
-      <th>Package</th>
-      <th style="white-space:nowrap">Size (Bytes)</th>
-      <th>MD5 Checksum</th>
-  </tr>
-  <tr>
-    <td>STL debug info</td>
-    <td>
-  <a onClick="return onDownload(this)"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.debug_info_download ?>"><?cs var:ndk.debug_info_download ?></a>
-    </td>
-    <td><?cs var:ndk.debug_info_bytes ?></td>
-    <td><?cs var:ndk.debug_info_checksum ?></td>
-  </tr>
   </table>
 
   <?cs ########  HERE IS THE JD DOC CONTENT ######### ?>
diff --git a/tools/post_process_props.py b/tools/post_process_props.py
index 5d1b350..030826d 100755
--- a/tools/post_process_props.py
+++ b/tools/post_process_props.py
@@ -16,9 +16,13 @@
 
 import sys
 
-# See PROP_VALUE_MAX system_properties.h.
-# PROP_VALUE_MAX in system_properties.h includes the termination NUL,
-# so we decrease it by 1 here.
+# Usage: post_process_props.py file.prop [blacklist_key, ...]
+# Blacklisted keys, if present, are removed from the property file.
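+#
+# For example (illustrative):
+#   post_process_props.py system/build.prop ro.example.unwanted_prop
+# drops any "ro.example.unwanted_prop=..." line from the property file.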
+
+# See PROP_NAME_MAX and PROP_VALUE_MAX in system_properties.h.
+# The constants in system_properties.h include the termination NUL,
+# so we decrease the values by 1 here.
+PROP_NAME_MAX = 31
 PROP_VALUE_MAX = 91
 
 # Put the modifications that you need to make into the /system/build.prop into this
@@ -56,6 +60,11 @@
                              "").startswith("eng")
   for key, value in buildprops.iteritems():
     # Check build properties' length.
+    if len(key) > PROP_NAME_MAX:
+      check_pass = False
+      sys.stderr.write("error: %s cannot exceed %d bytes: " %
+                       (key, PROP_NAME_MAX))
+      sys.stderr.write("%s (%d)\n" % (key, len(key)))
     if len(value) > PROP_VALUE_MAX:
       # If dev build, show a warning message, otherwise fail the
       # build with error message
@@ -82,8 +91,9 @@
     for line in self.lines:
       if not line or line.startswith("#"):
         continue
-      key, value = line.split("=", 1)
-      props[key] = value
+      if "=" in line:
+        key, value = line.split("=", 1)
+        props[key] = value
     return props
 
   def get(self, name):
@@ -101,6 +111,10 @@
         return
     self.lines.append(key + value)
 
+  def delete(self, name):
+    key = name + "="
+    self.lines = [ line for line in self.lines if not line.startswith(key) ]
+
   def write(self, f):
     f.write("\n".join(self.lines))
     f.write("\n")
@@ -124,6 +138,10 @@
   if not validate(properties):
     sys.exit(1)
 
+  # Drop any blacklisted keys
+  for key in argv[2:]:
+    properties.delete(key)
+
   f = open(filename, 'w+')
   properties.write(f)
   f.close()
diff --git a/tools/releasetools/add_img_to_target_files b/tools/releasetools/add_img_to_target_files
new file mode 120000
index 0000000..04323bd
--- /dev/null
+++ b/tools/releasetools/add_img_to_target_files
@@ -0,0 +1 @@
+add_img_to_target_files.py
\ No newline at end of file
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
new file mode 100755
index 0000000..bf217e0
--- /dev/null
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -0,0 +1,268 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Given a target-files zipfile that does not contain images (ie, does
+not have an IMAGES/ top-level subdirectory), produce the images and
+add them to the zipfile.
+
+Usage:  add_img_to_target_files target_files
+"""
+
+import sys
+
+if sys.hexversion < 0x02070000:
+  print >> sys.stderr, "Python 2.7 or newer is required."
+  sys.exit(1)
+
+import errno
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+import zipfile
+
+# missing in Python 2.4 and before
+if not hasattr(os, "SEEK_SET"):
+  os.SEEK_SET = 0
+
+import build_image
+import common
+
+OPTIONS = common.OPTIONS
+
+
+def AddSystem(output_zip, prefix="IMAGES/"):
+  """Turn the contents of SYSTEM into a system image and store it in
+  output_zip."""
+  block_list = common.MakeTempFile(prefix="system-blocklist-", suffix=".map")
+  imgname = BuildSystem(OPTIONS.input_tmp, OPTIONS.info_dict,
+                        block_list=block_list)
+  with open(imgname, "rb") as f:
+    common.ZipWriteStr(output_zip, prefix + "system.img", f.read())
+  with open(block_list, "rb") as f:
+    common.ZipWriteStr(output_zip, prefix + "system.map", f.read())
+
+
+def BuildSystem(input_dir, info_dict, block_list=None):
+  """Build the (sparse) system image and return the name of a temp
+  file containing it."""
+  return CreateImage(input_dir, info_dict, "system", block_list=block_list)
+
+
+def AddVendor(output_zip, prefix="IMAGES/"):
+  """Turn the contents of VENDOR into a vendor image and store in it
+  output_zip."""
+  block_list = common.MakeTempFile(prefix="vendor-blocklist-", suffix=".map")
+  imgname = BuildVendor(OPTIONS.input_tmp, OPTIONS.info_dict,
+                     block_list=block_list)
+  with open(imgname, "rb") as f:
+    common.ZipWriteStr(output_zip, prefix + "vendor.img", f.read())
+  with open(block_list, "rb") as f:
+    common.ZipWriteStr(output_zip, prefix + "vendor.map", f.read())
+
+
+def BuildVendor(input_dir, info_dict, block_list=None):
+  """Build the (sparse) vendor image and return the name of a temp
+  file containing it."""
+  return CreateImage(input_dir, info_dict, "vendor", block_list=block_list)
+
+
+def CreateImage(input_dir, info_dict, what, block_list=None):
+  print "creating " + what + ".img..."
+
+  img = common.MakeTempFile(prefix=what + "-", suffix=".img")
+
+  # The name of the directory it is making an image out of matters to
+  # mkyaffs2image.  It wants "system" but we have a directory named
+  # "SYSTEM", so create a symlink.
+  try:
+    os.symlink(os.path.join(input_dir, what.upper()),
+               os.path.join(input_dir, what))
+  except OSError, e:
+      # bogus error on my mac version?
+      #   File "./build/tools/releasetools/img_from_target_files", line 86, in AddSystem
+      #     os.path.join(OPTIONS.input_tmp, "system"))
+      # OSError: [Errno 17] File exists
+    if (e.errno == errno.EEXIST):
+      pass
+
+  image_props = build_image.ImagePropFromGlobalDict(info_dict, what)
+  fstab = info_dict["fstab"]
+  if fstab:
+    image_props["fs_type" ] = fstab["/" + what].fs_type
+
+  if what == "system":
+    fs_config_prefix = ""
+  else:
+    fs_config_prefix = what + "_"
+
+  fs_config = os.path.join(
+      input_dir, "META/" + fs_config_prefix + "filesystem_config.txt")
+  if not os.path.exists(fs_config): fs_config = None
+
+  fc_config = os.path.join(input_dir, "BOOT/RAMDISK/file_contexts")
+  if not os.path.exists(fc_config): fc_config = None
+
+  succ = build_image.BuildImage(os.path.join(input_dir, what),
+                                image_props, img,
+                                fs_config=fs_config,
+                                fc_config=fc_config,
+                                block_list=block_list)
+  assert succ, "build " + what + ".img image failed"
+
+  return img
+
+
+def AddUserdata(output_zip, prefix="IMAGES/"):
+  """Create an empty userdata image and store it in output_zip."""
+
+  image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
+                                                    "data")
+  # We only allow yaffs to have a 0/missing partition_size.
+  # Extfs, f2fs must have a size. Skip userdata.img if no size.
+  if (not image_props.get("fs_type", "").startswith("yaffs") and
+      not image_props.get("partition_size")):
+    return
+
+  print "creating userdata.img..."
+
+  # The name of the directory it is making an image out of matters to
+  # mkyaffs2image.  So we create a temp dir, and within it we create an
+  # empty dir named "data", and build the image from that.
+  temp_dir = tempfile.mkdtemp()
+  user_dir = os.path.join(temp_dir, "data")
+  os.mkdir(user_dir)
+  img = tempfile.NamedTemporaryFile()
+
+  fstab = OPTIONS.info_dict["fstab"]
+  if fstab:
+    image_props["fs_type" ] = fstab["/data"].fs_type
+  succ = build_image.BuildImage(user_dir, image_props, img.name)
+  assert succ, "build userdata.img image failed"
+
+  common.CheckSize(img.name, "userdata.img", OPTIONS.info_dict)
+  output_zip.write(img.name, prefix + "userdata.img")
+  img.close()
+  os.rmdir(user_dir)
+  os.rmdir(temp_dir)
+
+
+def AddCache(output_zip, prefix="IMAGES/"):
+  """Create an empty cache image and store it in output_zip."""
+
+  image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
+                                                    "cache")
+  # The build system has to explicitly request for cache.img.
+  if "fs_type" not in image_props:
+    return
+
+  print "creating cache.img..."
+
+  # The name of the directory it is making an image out of matters to
+  # mkyaffs2image.  So we create a temp dir, and within it we create an
+  # empty dir named "cache", and build the image from that.
+  temp_dir = tempfile.mkdtemp()
+  user_dir = os.path.join(temp_dir, "cache")
+  os.mkdir(user_dir)
+  img = tempfile.NamedTemporaryFile()
+
+  fstab = OPTIONS.info_dict["fstab"]
+  if fstab:
+    image_props["fs_type" ] = fstab["/cache"].fs_type
+  succ = build_image.BuildImage(user_dir, image_props, img.name)
+  assert succ, "build cache.img image failed"
+
+  common.CheckSize(img.name, "cache.img", OPTIONS.info_dict)
+  output_zip.write(img.name, prefix + "cache.img")
+  img.close()
+  os.rmdir(user_dir)
+  os.rmdir(temp_dir)
+
+
+def AddImagesToTargetFiles(filename):
+  OPTIONS.input_tmp, input_zip = common.UnzipTemp(filename)
+
+  for n in input_zip.namelist():
+    if n.startswith("IMAGES/"):
+      print "target_files appears to already contain images."
+      sys.exit(1)
+
+  try:
+    input_zip.getinfo("VENDOR/")
+    has_vendor = True
+  except KeyError:
+    has_vendor = False
+
+  OPTIONS.info_dict = common.LoadInfoDict(input_zip)
+  if "selinux_fc" in OPTIONS.info_dict:
+    OPTIONS.info_dict["selinux_fc"] = os.path.join(
+        OPTIONS.input_tmp, "BOOT", "RAMDISK", "file_contexts")
+
+  input_zip.close()
+  output_zip = zipfile.ZipFile(filename, "a",
+                               compression=zipfile.ZIP_DEFLATED)
+
+  def banner(s):
+    print "\n\n++++ " + s + " ++++\n\n"
+
+  banner("boot")
+  boot_image = common.GetBootableImage(
+      "IMAGES/boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
+  if boot_image:
+    boot_image.AddToZip(output_zip)
+
+  banner("recovery")
+  recovery_image = common.GetBootableImage(
+      "IMAGES/recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
+  if recovery_image:
+    recovery_image.AddToZip(output_zip)
+
+  banner("system")
+  AddSystem(output_zip)
+  if has_vendor:
+    banner("vendor")
+    AddVendor(output_zip)
+  banner("userdata")
+  AddUserdata(output_zip)
+  banner("cache")
+  AddCache(output_zip)
+
+  output_zip.close()
+
+
+def main(argv):
+  args = common.ParseOptions(argv, __doc__)
+
+  if len(args) != 1:
+    common.Usage(__doc__)
+    sys.exit(1)
+
+  AddImagesToTargetFiles(args[0])
+  print "done."
+
+if __name__ == '__main__':
+  try:
+    common.CloseInheritedPipes()
+    main(sys.argv[1:])
+  except common.ExternalError, e:
+    print
+    print "   ERROR: %s" % (e,)
+    print
+    sys.exit(1)
+  finally:
+    common.Cleanup()
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
new file mode 100644
index 0000000..216486c
--- /dev/null
+++ b/tools/releasetools/blockimgdiff.py
@@ -0,0 +1,622 @@
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+
+from collections import deque, OrderedDict
+from hashlib import sha1
+import itertools
+import multiprocessing
+import os
+import pprint
+import re
+import subprocess
+import sys
+import threading
+import tempfile
+
+from rangelib import *
+
+__all__ = ["EmptyImage", "DataImage", "BlockImageDiff"]
+
+def compute_patch(src, tgt, imgdiff=False):
+  srcfd, srcfile = tempfile.mkstemp(prefix="src-")
+  tgtfd, tgtfile = tempfile.mkstemp(prefix="tgt-")
+  patchfd, patchfile = tempfile.mkstemp(prefix="patch-")
+  os.close(patchfd)
+
+  try:
+    with os.fdopen(srcfd, "wb") as f_src:
+      for p in src:
+        f_src.write(p)
+
+    with os.fdopen(tgtfd, "wb") as f_tgt:
+      for p in tgt:
+        f_tgt.write(p)
+    try:
+      os.unlink(patchfile)
+    except OSError:
+      pass
+    if imgdiff:
+      p = subprocess.call(["imgdiff", "-z", srcfile, tgtfile, patchfile],
+                          stdout=open("/dev/null", "a"),
+                          stderr=subprocess.STDOUT)
+    else:
+      p = subprocess.call(["bsdiff", srcfile, tgtfile, patchfile])
+
+    if p:
+      raise ValueError("diff failed: " + str(p))
+
+    with open(patchfile, "rb") as f:
+      return f.read()
+  finally:
+    try:
+      os.unlink(srcfile)
+      os.unlink(tgtfile)
+      os.unlink(patchfile)
+    except OSError:
+      pass
+
+class EmptyImage(object):
+  """A zero-length image."""
+  blocksize = 4096
+  care_map = RangeSet()
+  total_blocks = 0
+  file_map = {}
+  def ReadRangeSet(self, ranges):
+    return ()
+  def TotalSha1(self):
+    return sha1().hexdigest()
+
+
+class DataImage(object):
+  """An image wrapped around a single string of data."""
+
+  def __init__(self, data, trim=False, pad=False):
+    self.data = data
+    self.blocksize = 4096
+
+    assert not (trim and pad)
+
+    partial = len(self.data) % self.blocksize
+    if partial > 0:
+      if trim:
+        self.data = self.data[:-partial]
+      elif pad:
+        self.data += '\0' * (self.blocksize - partial)
+      else:
+        raise ValueError(("data for DataImage must be multiple of %d bytes "
+                          "unless trim or pad is specified") %
+                         (self.blocksize,))
+
+    assert len(self.data) % self.blocksize == 0
+
+    self.total_blocks = len(self.data) / self.blocksize
+    self.care_map = RangeSet(data=(0, self.total_blocks))
+
+    zero_blocks = []
+    nonzero_blocks = []
+    reference = '\0' * self.blocksize
+
+    for i in range(self.total_blocks):
+      d = self.data[i*self.blocksize : (i+1)*self.blocksize]
+      if d == reference:
+        zero_blocks.append(i)
+        zero_blocks.append(i+1)
+      else:
+        nonzero_blocks.append(i)
+        nonzero_blocks.append(i+1)
+
+    self.file_map = {"__ZERO": RangeSet(zero_blocks),
+                     "__NONZERO": RangeSet(nonzero_blocks)}
+
+  def ReadRangeSet(self, ranges):
+    return [self.data[s*self.blocksize:e*self.blocksize] for (s, e) in ranges]
+
+  def TotalSha1(self):
+    if not hasattr(self, "sha1"):
+      self.sha1 = sha1(self.data).hexdigest()
+    return self.sha1
+
+
+class Transfer(object):
+  def __init__(self, tgt_name, src_name, tgt_ranges, src_ranges, style, by_id):
+    self.tgt_name = tgt_name
+    self.src_name = src_name
+    self.tgt_ranges = tgt_ranges
+    self.src_ranges = src_ranges
+    self.style = style
+    self.intact = (getattr(tgt_ranges, "monotonic", False) and
+                   getattr(src_ranges, "monotonic", False))
+    self.goes_before = {}
+    self.goes_after = {}
+
+    self.id = len(by_id)
+    by_id.append(self)
+
+  def __str__(self):
+    return (str(self.id) + ": <" + str(self.src_ranges) + " " + self.style +
+            " to " + str(self.tgt_ranges) + ">")
+
+
+# BlockImageDiff works on two image objects.  An image object is
+# anything that provides the following attributes:
+#
+#    blocksize: the size in bytes of a block, currently must be 4096.
+#
+#    total_blocks: the total size of the partition/image, in blocks.
+#
+#    care_map: a RangeSet containing which blocks (in the range [0,
+#      total_blocks) we actually care about; i.e. which blocks contain
+#      data.
+#
+#    file_map: a dict that partitions the blocks contained in care_map
+#      into smaller domains that are useful for doing diffs on.
+#      (Typically a domain is a file, and the key in file_map is the
+#      pathname.)
+#
+#    ReadRangeSet(): a function that takes a RangeSet and returns the
+#      data contained in the image blocks of that RangeSet.  The data
+#      is returned as a list or tuple of strings; concatenating the
+#      elements together should produce the requested data.
+#      Implementations are free to break up the data into list/tuple
+#      elements in any way that is convenient.
+#
+#    TotalSha1(): a function that returns (as a hex string) the SHA-1
+#      hash of all the data in the image (ie, all the blocks in the
+#      care_map)
+#
+# When creating a BlockImageDiff, the src image may be None, in which
+# case the list of transfers produced will never read from the
+# original image.
+
+class BlockImageDiff(object):
+  def __init__(self, tgt, src=None, threads=None):
+    if threads is None:
+      threads = multiprocessing.cpu_count() // 2
+      if threads == 0: threads = 1
+    self.threads = threads
+
+    self.tgt = tgt
+    if src is None:
+      src = EmptyImage()
+    self.src = src
+
+    # The updater code that installs the patch always uses 4k blocks.
+    assert tgt.blocksize == 4096
+    assert src.blocksize == 4096
+
+    # The range sets in each filemap should comprise a partition of
+    # the care map.
+    self.AssertPartition(src.care_map, src.file_map.values())
+    self.AssertPartition(tgt.care_map, tgt.file_map.values())
+
+  def Compute(self, prefix):
+    # When looking for a source file to use as the diff input for a
+    # target file, we try:
+    #   1) an exact path match if available, otherwise
+    #   2) an exact basename match if available, otherwise
+    #   3) a basename match after all runs of digits are replaced by
+    #      "#" if available, otherwise
+    #   4) we have no source for this target.
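+    #
+    # For example (illustrative), a target file "app/Browser37.apk" with no
+    # exact match may still be diffed against a source "app/Browser36.apk",
+    # because both basenames reduce to "Browser#.apk" under rule 3.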
+    self.AbbreviateSourceNames()
+    self.FindTransfers()
+
+    # Find the ordering dependencies among transfers (this is O(n^2)
+    # in the number of transfers).
+    self.GenerateDigraph()
+    # Find a sequence of transfers that satisfies as many ordering
+    # dependencies as possible (heuristically).
+    self.FindVertexSequence()
+    # Fix up the ordering dependencies that the sequence didn't
+    # satisfy.
+    self.RemoveBackwardEdges()
+    # Double-check our work.
+    self.AssertSequenceGood()
+
+    self.ComputePatches(prefix)
+    self.WriteTransfers(prefix)
+
+  def WriteTransfers(self, prefix):
+    out = []
+
+    out.append("1\n")   # format version number
+    total = 0
+    performs_read = False
+
+    for xf in self.transfers:
+
+      # zero [rangeset]
+      # new [rangeset]
+      # bsdiff patchstart patchlen [src rangeset] [tgt rangeset]
+      # imgdiff patchstart patchlen [src rangeset] [tgt rangeset]
+      # move [src rangeset] [tgt rangeset]
+      # erase [rangeset]
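+      #
+      # For example (illustrative), "move 2,0,10 2,10,20" copies blocks
+      # [0,10) to [10,20), and "zero 2,0,10" fills blocks [0,10) with
+      # zeroes; each rangeset is written in the raw "count,start,end,..."
+      # form produced by RangeSet.to_string_raw().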
+
+      tgt_size = xf.tgt_ranges.size()
+
+      if xf.style == "new":
+        assert xf.tgt_ranges
+        out.append("%s %s\n" % (xf.style, xf.tgt_ranges.to_string_raw()))
+        total += tgt_size
+      elif xf.style == "move":
+        performs_read = True
+        assert xf.tgt_ranges
+        assert xf.src_ranges.size() == tgt_size
+        if xf.src_ranges != xf.tgt_ranges:
+          out.append("%s %s %s\n" % (
+              xf.style,
+              xf.src_ranges.to_string_raw(), xf.tgt_ranges.to_string_raw()))
+          total += tgt_size
+      elif xf.style in ("bsdiff", "imgdiff"):
+        performs_read = True
+        assert xf.tgt_ranges
+        assert xf.src_ranges
+        out.append("%s %d %d %s %s\n" % (
+            xf.style, xf.patch_start, xf.patch_len,
+            xf.src_ranges.to_string_raw(), xf.tgt_ranges.to_string_raw()))
+        total += tgt_size
+      elif xf.style == "zero":
+        assert xf.tgt_ranges
+        to_zero = xf.tgt_ranges.subtract(xf.src_ranges)
+        if to_zero:
+          out.append("%s %s\n" % (xf.style, to_zero.to_string_raw()))
+          total += to_zero.size()
+      else:
+        raise ValueError, "unknown transfer style '%s'\n" % (xf.style,)
+
+    out.insert(1, str(total) + "\n")
+
+    all_tgt = RangeSet(data=(0, self.tgt.total_blocks))
+    if performs_read:
+      # if some of the original data is used, then at the end we'll
+      # erase all the blocks on the partition that don't contain data
+      # in the new image.
+      new_dontcare = all_tgt.subtract(self.tgt.care_map)
+      if new_dontcare:
+        out.append("erase %s\n" % (new_dontcare.to_string_raw(),))
+    else:
+      # if nothing is read (ie, this is a full OTA), then we can start
+      # by erasing the entire partition.
+      out.insert(2, "erase %s\n" % (all_tgt.to_string_raw(),))
+
+    with open(prefix + ".transfer.list", "wb") as f:
+      for i in out:
+        f.write(i)
+
+  def ComputePatches(self, prefix):
+    print("Reticulating splines...")
+    diff_q = []
+    patch_num = 0
+    with open(prefix + ".new.dat", "wb") as new_f:
+      for xf in self.transfers:
+        if xf.style == "zero":
+          pass
+        elif xf.style == "new":
+          for piece in self.tgt.ReadRangeSet(xf.tgt_ranges):
+            new_f.write(piece)
+        elif xf.style == "diff":
+          src = self.src.ReadRangeSet(xf.src_ranges)
+          tgt = self.tgt.ReadRangeSet(xf.tgt_ranges)
+
+          # We can't compare src and tgt directly because they may have
+          # the same content but be broken up into blocks differently, eg:
+          #
+          #    ["he", "llo"]  vs  ["h", "ello"]
+          #
+          # We want those to compare equal, ideally without having to
+          # actually concatenate the strings (these may be tens of
+          # megabytes).
+
+          src_sha1 = sha1()
+          for p in src:
+            src_sha1.update(p)
+          tgt_sha1 = sha1()
+          tgt_size = 0
+          for p in tgt:
+            tgt_sha1.update(p)
+            tgt_size += len(p)
+
+          if src_sha1.digest() == tgt_sha1.digest():
+            # These are identical; we don't need to generate a patch,
+            # just issue copy commands on the device.
+            xf.style = "move"
+          else:
+            # For files in zip format (eg, APKs, JARs, etc.) we would
+            # like to use imgdiff -z if possible (because it usually
+            # produces significantly smaller patches than bsdiff).
+            # This is permissible if:
+            #
+            #  - the source and target files are monotonic (ie, the
+            #    data is stored with blocks in increasing order), and
+            #  - we haven't removed any blocks from the source set.
+            #
+            # If these conditions are satisfied then appending all the
+            # blocks in the set together in order will produce a valid
+            # zip file (plus possibly extra zeros in the last block),
+            # which is what imgdiff needs to operate.  (imgdiff is
+            # fine with extra zeros at the end of the file.)
+            imgdiff = (xf.intact and
+                       xf.tgt_name.split(".")[-1].lower()
+                       in ("apk", "jar", "zip"))
+            xf.style = "imgdiff" if imgdiff else "bsdiff"
+            diff_q.append((tgt_size, src, tgt, xf, patch_num))
+            patch_num += 1
+
+        else:
+          assert False, "unknown style " + xf.style
+
+    if diff_q:
+      if self.threads > 1:
+        print("Computing patches (using %d threads)..." % (self.threads,))
+      else:
+        print("Computing patches...")
+      diff_q.sort()
+
+      patches = [None] * patch_num
+
+      lock = threading.Lock()
+      def diff_worker():
+        while True:
+          with lock:
+            if not diff_q: return
+            tgt_size, src, tgt, xf, patchnum = diff_q.pop()
+          patch = compute_patch(src, tgt, imgdiff=(xf.style == "imgdiff"))
+          size = len(patch)
+          with lock:
+            patches[patchnum] = (patch, xf)
+            print("%10d %10d (%6.2f%%) %7s %s" % (
+                size, tgt_size, size * 100.0 / tgt_size, xf.style,
+                xf.tgt_name if xf.tgt_name == xf.src_name else (
+                    xf.tgt_name + " (from " + xf.src_name + ")")))
+
+      threads = [threading.Thread(target=diff_worker)
+                 for i in range(self.threads)]
+      for th in threads:
+        th.start()
+      while threads:
+        threads.pop().join()
+    else:
+      patches = []
+
+    p = 0
+    with open(prefix + ".patch.dat", "wb") as patch_f:
+      for patch, xf in patches:
+        xf.patch_start = p
+        xf.patch_len = len(patch)
+        patch_f.write(patch)
+        p += len(patch)
+
+  def AssertSequenceGood(self):
+    # Simulate the sequences of transfers we will output, and check that:
+    # - we never read a block after writing it, and
+    # - we write every block we care about exactly once.
+
+    # Start with no blocks having been touched yet.
+    touched = RangeSet()
+
+    # Imagine processing the transfers in order.
+    for xf in self.transfers:
+      # Check that the input blocks for this transfer haven't yet been touched.
+      assert not touched.overlaps(xf.src_ranges)
+      # Check that the output blocks for this transfer haven't yet been touched.
+      assert not touched.overlaps(xf.tgt_ranges)
+      # Touch all the blocks written by this transfer.
+      touched = touched.union(xf.tgt_ranges)
+
+    # Check that we've written every target block.
+    assert touched == self.tgt.care_map
+
+  def RemoveBackwardEdges(self):
+    print("Removing backward edges...")
+    in_order = 0
+    out_of_order = 0
+    lost_source = 0
+
+    for xf in self.transfers:
+      io = 0
+      ooo = 0
+      lost = 0
+      size = xf.src_ranges.size()
+      for u in xf.goes_before:
+        # xf should go before u
+        if xf.order < u.order:
+          # it does, hurray!
+          io += 1
+        else:
+          # it doesn't, boo.  trim the blocks that u writes from xf's
+          # source, so that xf can go after u.
+          ooo += 1
+          assert xf.src_ranges.overlaps(u.tgt_ranges)
+          xf.src_ranges = xf.src_ranges.subtract(u.tgt_ranges)
+          xf.intact = False
+
+      if xf.style == "diff" and not xf.src_ranges:
+        # nothing left to diff from; treat as new data
+        xf.style = "new"
+
+      lost = size - xf.src_ranges.size()
+      lost_source += lost
+      in_order += io
+      out_of_order += ooo
+
+    print(("  %d/%d dependencies (%.2f%%) were violated; "
+           "%d source blocks removed.") %
+          (out_of_order, in_order + out_of_order,
+           (out_of_order * 100.0 / (in_order + out_of_order))
+           if (in_order + out_of_order) else 0.0,
+           lost_source))
+
+  def FindVertexSequence(self):
+    print("Finding vertex sequence...")
+
+    # This is based on "A Fast & Effective Heuristic for the Feedback
+    # Arc Set Problem" by P. Eades, X. Lin, and W.F. Smyth.  Think of
+    # it as starting with the digraph G and moving all the vertices to
+    # be on a horizontal line in some order, trying to minimize the
+    # number of edges that end up pointing to the left.  Left-pointing
+    # edges will get removed to turn the digraph into a DAG.  In this
+    # case each edge has a weight which is the number of source blocks
+    # we'll lose if that edge is removed; we try to minimize the total
+    # weight rather than just the number of edges.
+
+    # Make a copy of the edge set; this copy will get destroyed by the
+    # algorithm.
+    for xf in self.transfers:
+      xf.incoming = xf.goes_after.copy()
+      xf.outgoing = xf.goes_before.copy()
+
+    # We use an OrderedDict instead of just a set so that the output
+    # is repeatable; otherwise it would depend on the hash values of
+    # the transfer objects.
+    G = OrderedDict()
+    for xf in self.transfers:
+      G[xf] = None
+    s1 = deque()  # the left side of the sequence, built from left to right
+    s2 = deque()  # the right side of the sequence, built from right to left
+
+    while G:
+
+      # Put all sinks at the end of the sequence.
+      while True:
+        sinks = [u for u in G if not u.outgoing]
+        if not sinks: break
+        for u in sinks:
+          s2.appendleft(u)
+          del G[u]
+          for iu in u.incoming:
+            del iu.outgoing[u]
+
+      # Put all the sources at the beginning of the sequence.
+      while True:
+        sources = [u for u in G if not u.incoming]
+        if not sources: break
+        for u in sources:
+          s1.append(u)
+          del G[u]
+          for iu in u.outgoing:
+            del iu.incoming[u]
+
+      if not G: break
+
+      # Find the "best" vertex to put next.  "Best" is the one that
+      # maximizes the net difference in source blocks saved we get by
+      # pretending it's a source rather than a sink.
+
+      max_d = None
+      best_u = None
+      for u in G:
+        d = sum(u.outgoing.values()) - sum(u.incoming.values())
+        if best_u is None or d > max_d:
+          max_d = d
+          best_u = u
+
+      u = best_u
+      s1.append(u)
+      del G[u]
+      for iu in u.outgoing:
+        del iu.incoming[u]
+      for iu in u.incoming:
+        del iu.outgoing[u]
+
+    # Now record the sequence in the 'order' field of each transfer,
+    # and by rearranging self.transfers to be in the chosen sequence.
+
+    new_transfers = []
+    for x in itertools.chain(s1, s2):
+      x.order = len(new_transfers)
+      new_transfers.append(x)
+      del x.incoming
+      del x.outgoing
+
+    self.transfers = new_transfers
+
+  def GenerateDigraph(self):
+    print("Generating digraph...")
+    for a in self.transfers:
+      for b in self.transfers:
+        if a is b: continue
+
+        # If the blocks written by A are read by B, then B needs to go before A.
+        i = a.tgt_ranges.intersect(b.src_ranges)
+        if i:
+          if b.src_name == "__ZERO":
+            # the cost of removing source blocks for the __ZERO domain
+            # is (nearly) zero.
+            size = 0
+          else:
+            size = i.size()
+          b.goes_before[a] = size
+          a.goes_after[b] = size
+
+  def FindTransfers(self):
+    self.transfers = []
+    empty = RangeSet()
+    for tgt_fn, tgt_ranges in self.tgt.file_map.items():
+      if tgt_fn == "__ZERO":
+        # the special "__ZERO" domain is all the blocks not contained
+        # in any file and that are filled with zeros.  We have a
+        # special transfer style for zero blocks.
+        src_ranges = self.src.file_map.get("__ZERO", empty)
+        Transfer(tgt_fn, "__ZERO", tgt_ranges, src_ranges,
+                 "zero", self.transfers)
+        continue
+
+      elif tgt_fn in self.src.file_map:
+        # Look for an exact pathname match in the source.
+        Transfer(tgt_fn, tgt_fn, tgt_ranges, self.src.file_map[tgt_fn],
+                 "diff", self.transfers)
+        continue
+
+      b = os.path.basename(tgt_fn)
+      if b in self.src_basenames:
+        # Look for an exact basename match in the source.
+        src_fn = self.src_basenames[b]
+        Transfer(tgt_fn, src_fn, tgt_ranges, self.src.file_map[src_fn],
+                 "diff", self.transfers)
+        continue
+
+      b = re.sub("[0-9]+", "#", b)
+      if b in self.src_numpatterns:
+        # Look for a 'number pattern' match (a basename match after
+        # all runs of digits are replaced by "#").  (This is useful
+        # for .so files that contain version numbers in the filename
+        # that get bumped.)
+        src_fn = self.src_numpatterns[b]
+        Transfer(tgt_fn, src_fn, tgt_ranges, self.src.file_map[src_fn],
+                 "diff", self.transfers)
+        continue
+
+      Transfer(tgt_fn, None, tgt_ranges, empty, "new", self.transfers)
+
+  def AbbreviateSourceNames(self):
+    self.src_basenames = {}
+    self.src_numpatterns = {}
+
+    for k in self.src.file_map.keys():
+      b = os.path.basename(k)
+      self.src_basenames[b] = k
+      b = re.sub("[0-9]+", "#", b)
+      self.src_numpatterns[b] = k
+
+  @staticmethod
+  def AssertPartition(total, seq):
+    """Assert that all the RangeSets in 'seq' form a partition of the
+    'total' RangeSet (ie, they are nonintersecting and their union
+    equals 'total')."""
+    so_far = RangeSet()
+    for i in seq:
+      assert not so_far.overlaps(i)
+      so_far = so_far.union(i)
+    assert so_far == total
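
FindVertexSequence above is the Eades/Lin/Smyth greedy heuristic for the weighted feedback arc set problem: repeatedly peel off sinks and sources, and when neither exists, move the vertex whose outgoing weight most exceeds its incoming weight. The following standalone sketch (not part of the patch; vertex names, weights, and the helper name are made up for illustration) shows the same ordering idea on a toy digraph:

    from collections import OrderedDict

    def greedy_fas_order(edges):
        """edges maps (u, v) -> weight, meaning u should come before v."""
        verts = OrderedDict()
        for (u, v), w in edges.items():
            verts.setdefault(u, {"in": {}, "out": {}})
            verts.setdefault(v, {"in": {}, "out": {}})
            verts[u]["out"][v] = w
            verts[v]["in"][u] = w
        s1, s2 = [], []
        while verts:
            changed = True
            while changed:
                changed = False
                # Peel sinks (no outgoing edges) off to the right...
                for u in [x for x in verts if not verts[x]["out"]]:
                    s2.insert(0, u)
                    for p in verts[u]["in"]:
                        del verts[p]["out"][u]
                    del verts[u]
                    changed = True
                # ...and sources (no incoming edges) off to the left.
                for u in [x for x in verts if not verts[x]["in"]]:
                    s1.append(u)
                    for n in verts[u]["out"]:
                        del verts[n]["in"][u]
                    del verts[u]
                    changed = True
            if not verts:
                break
            # Otherwise take the vertex that is cheapest to treat as a source,
            # i.e. the one maximizing outgoing minus incoming edge weight.
            u = max(verts, key=lambda x: sum(verts[x]["out"].values()) -
                                         sum(verts[x]["in"].values()))
            s1.append(u)
            for n in verts[u]["out"]:
                del verts[n]["in"][u]
            for p in verts[u]["in"]:
                del verts[p]["out"][u]
            del verts[u]
        return s1 + s2

    # A 3-cycle: dropping the lightest edge (c -> a, weight 1) gives a, b, c.
    print(greedy_fas_order({("a", "b"): 5, ("b", "c"): 4, ("c", "a"): 1}))
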
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index f8f2ada..a010e84 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -24,6 +24,11 @@
 import os.path
 import subprocess
 import sys
+import commands
+import shutil
+import tempfile
+
+FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
 
 def RunCommand(cmd):
   """ Echo and run the given command
@@ -38,13 +43,176 @@
   p.communicate()
   return p.returncode
 
-def BuildImage(in_dir, prop_dict, out_file):
+def GetVerityTreeSize(partition_size):
+  cmd = "build_verity_tree -s %d"
+  cmd %= partition_size
+  status, output = commands.getstatusoutput(cmd)
+  if status:
+    print output
+    return False, 0
+  return True, int(output)
+
+def GetVerityMetadataSize(partition_size):
+  cmd = "system/extras/verity/build_verity_metadata.py -s %d"
+  cmd %= partition_size
+  status, output = commands.getstatusoutput(cmd)
+  if status:
+    print output
+    return False, 0
+  return True, int(output)
+
+def AdjustPartitionSizeForVerity(partition_size):
+  """Modifies the provided partition size to account for the verity metadata.
+
+  This information is used to size the created image appropriately.
+  Args:
+    partition_size: the size of the partition to be verified.
+  Returns:
+    The size of the partition adjusted for verity metadata.
+  """
+  success, verity_tree_size = GetVerityTreeSize(partition_size)
+  if not success:
+    return 0
+  success, verity_metadata_size = GetVerityMetadataSize(partition_size)
+  if not success:
+    return 0
+  return partition_size - verity_tree_size - verity_metadata_size
+
+def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict):
+  cmd = ("build_verity_tree -A %s %s %s" % (FIXED_SALT, sparse_image_path, verity_image_path))
+  print cmd
+  status, output = commands.getstatusoutput(cmd)
+  if status:
+    print "Could not build verity tree! Error: %s" % output
+    return False
+  root, salt = output.split()
+  prop_dict["verity_root_hash"] = root
+  prop_dict["verity_salt"] = salt
+  return True
+
+def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
+                        block_device, signer_path, key):
+  cmd = ("system/extras/verity/build_verity_metadata.py %s %s %s %s %s %s %s" %
+              (image_size,
+              verity_metadata_path,
+              root_hash,
+              salt,
+              block_device,
+              signer_path,
+              key))
+  print cmd
+  status, output = commands.getstatusoutput(cmd)
+  if status:
+    print "Could not build verity metadata! Error: %s" % output
+    return False
+  return True
+
+def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
+  """Appends the unsparse image to the given sparse image.
+
+  Args:
+    sparse_image_path: the path to the (sparse) image
+    unsparse_image_path: the path to the (unsparse) image
+    error_message: message to print if the append command fails
+  Returns:
+    True on success, False on failure.
+  """
+  cmd = "append2simg %s %s"
+  cmd %= (sparse_image_path, unsparse_image_path)
+  print cmd
+  status, output = commands.getstatusoutput(cmd)
+  if status:
+    print "%s: %s" % (error_message, output)
+    return False
+  return True
+
+def BuildVerifiedImage(data_image_path, verity_image_path, verity_metadata_path):
+  if not Append2Simg(data_image_path, verity_metadata_path, "Could not append verity metadata!"):
+    return False
+  if not Append2Simg(data_image_path, verity_image_path, "Could not append verity tree!"):
+    return False
+  return True
+
+def UnsparseImage(sparse_image_path, replace=True):
+  img_dir = os.path.dirname(sparse_image_path)
+  unsparse_image_path = "unsparse_" + os.path.basename(sparse_image_path)
+  unsparse_image_path = os.path.join(img_dir, unsparse_image_path)
+  if os.path.exists(unsparse_image_path):
+    if replace:
+      os.unlink(unsparse_image_path)
+    else:
+      return True, unsparse_image_path
+  inflate_command = ["simg2img", sparse_image_path, unsparse_image_path]
+  exit_code = RunCommand(inflate_command)
+  if exit_code != 0:
+    os.remove(unsparse_image_path)
+    return False, None
+  return True, unsparse_image_path
+
+def MakeVerityEnabledImage(out_file, prop_dict):
+  """Creates an image that is verifiable using dm-verity.
+
+  Args:
+    out_file: the location to write the verifiable image at
+    prop_dict: a dictionary of properties required for image creation and verification
+  Returns:
+    True on success, False otherwise.
+  """
+  # get properties
+  image_size = prop_dict["partition_size"]
+  block_dev = prop_dict["verity_block_device"]
+  signer_key = prop_dict["verity_key"]
+  signer_path = prop_dict["verity_signer_cmd"]
+
+  # make a tempdir
+  tempdir_name = tempfile.mkdtemp(suffix="_verity_images")
+
+  # get partial image paths
+  verity_image_path = os.path.join(tempdir_name, "verity.img")
+  verity_metadata_path = os.path.join(tempdir_name, "verity_metadata.img")
+
+  # build the verity tree and get the root hash and salt
+  if not BuildVerityTree(out_file, verity_image_path, prop_dict):
+    shutil.rmtree(tempdir_name, ignore_errors=True)
+    return False
+
+  # build the metadata blocks
+  root_hash = prop_dict["verity_root_hash"]
+  salt = prop_dict["verity_salt"]
+  if not BuildVerityMetadata(image_size,
+                              verity_metadata_path,
+                              root_hash,
+                              salt,
+                              block_dev,
+                              signer_path,
+                              signer_key):
+    shutil.rmtree(tempdir_name, ignore_errors=True)
+    return False
+
+  # build the full verified image
+  if not BuildVerifiedImage(out_file,
+                            verity_image_path,
+                            verity_metadata_path):
+    shutil.rmtree(tempdir_name, ignore_errors=True)
+    return False
+
+  shutil.rmtree(tempdir_name, ignore_errors=True)
+  return True
+
+def BuildImage(in_dir, prop_dict, out_file,
+               fs_config=None,
+               fc_config=None,
+               block_list=None):
   """Build an image to out_file from in_dir with property prop_dict.
 
   Args:
     in_dir: path of input directory.
     prop_dict: property dictionary.
     out_file: path of the output image file.
+    fs_config: path to the fs_config file (typically
+      META/filesystem_config.txt).  If None then the configuration in
+      the local client will be used.
+    fc_config: path to the SELinux file_contexts file.  If None then
+      the value from prop_dict['selinux_fc'] will be used.
+    block_list: path to which the image's block list (the blocks used
+      by each file in the image) will be written, for use by
+      block-based OTAs.  If None then no block list is generated.
 
   Returns:
     True iff the image is built successfully.
@@ -52,6 +220,18 @@
   build_command = []
   fs_type = prop_dict.get("fs_type", "")
   run_fsck = False
+
+  is_verity_partition = "verity_block_device" in prop_dict
+  verity_supported = prop_dict.get("verity") == "true"
+  # adjust the partition size to make room for the hashes if this is to be verified
+  if verity_supported and is_verity_partition:
+    partition_size = int(prop_dict.get("partition_size"))
+    adjusted_size = AdjustPartitionSizeForVerity(partition_size)
+    if not adjusted_size:
+      return False
+    prop_dict["partition_size"] = str(adjusted_size)
+    prop_dict["original_partition_size"] = str(partition_size)
+
   if fs_type.startswith("ext"):
     build_command = ["mkuserimg.sh"]
     if "extfs_sparse_flag" in prop_dict:
@@ -59,10 +239,20 @@
       run_fsck = True
     build_command.extend([in_dir, out_file, fs_type,
                           prop_dict["mount_point"]])
-    if "partition_size" in prop_dict:
-      build_command.append(prop_dict["partition_size"])
-    if "selinux_fc" in prop_dict:
+    build_command.append(prop_dict["partition_size"])
+    if "timestamp" in prop_dict:
+      build_command.extend(["-T", str(prop_dict["timestamp"])])
+    if fs_config is not None:
+      build_command.extend(["-C", fs_config])
+    if block_list is not None:
+      build_command.extend(["-B", block_list])
+    if fc_config is not None:
+      build_command.append(fc_config)
+    elif "selinux_fc" in prop_dict:
       build_command.append(prop_dict["selinux_fc"])
+  elif fs_type.startswith("f2fs"):
+    build_command = ["mkf2fsuserimg.sh"]
+    build_command.extend([out_file, prop_dict["partition_size"]])
   else:
     build_command = ["mkyaffs2image", "-f"]
     if prop_dict.get("mkyaffs2_extra_flags", None):
@@ -77,14 +267,14 @@
   if exit_code != 0:
     return False
 
+  # create the verified image if this is to be verified
+  if verity_supported and is_verity_partition:
+    if not MakeVerityEnabledImage(out_file, prop_dict):
+      return False
+
   if run_fsck and prop_dict.get("skip_fsck") != "true":
-    # Inflate the sparse image
-    unsparse_image = os.path.join(
-        os.path.dirname(out_file), "unsparse_" + os.path.basename(out_file))
-    inflate_command = ["simg2img", out_file, unsparse_image]
-    exit_code = RunCommand(inflate_command)
-    if exit_code != 0:
-      os.remove(unsparse_image)
+    success, unsparse_image = UnsparseImage(out_file, replace=False)
+    if not success:
       return False
 
     # Run e2fsck on the inflated image file
@@ -104,6 +294,10 @@
     mount_point: such as "system", "data" etc.
   """
   d = {}
+  if "build.prop" in glob_dict:
+    bp = glob_dict["build.prop"]
+    if "ro.build.date.utc" in bp:
+      d["timestamp"] = bp["ro.build.date.utc"]
 
   def copy_prop(src_p, dest_p):
     if src_p in glob_dict:
@@ -114,6 +308,9 @@
       "mkyaffs2_extra_flags",
       "selinux_fc",
       "skip_fsck",
+      "verity",
+      "verity_key",
+      "verity_signer_cmd"
       )
   for p in common_props:
     copy_prop(p, p)
@@ -122,8 +319,11 @@
   if mount_point == "system":
     copy_prop("fs_type", "fs_type")
     copy_prop("system_size", "partition_size")
+    copy_prop("system_verity_block_device", "verity_block_device")
   elif mount_point == "data":
+    # Copy the generic fs type first, override with specific one if available.
     copy_prop("fs_type", "fs_type")
+    copy_prop("userdata_fs_type", "fs_type")
     copy_prop("userdata_size", "partition_size")
   elif mount_point == "cache":
     copy_prop("cache_fs_type", "fs_type")
@@ -131,6 +331,10 @@
   elif mount_point == "vendor":
     copy_prop("vendor_fs_type", "fs_type")
     copy_prop("vendor_size", "partition_size")
+    copy_prop("vendor_verity_block_device", "verity_block_device")
+  elif mount_point == "oem":
+    copy_prop("fs_type", "fs_type")
+    copy_prop("oem_size", "partition_size")
 
   return d
 
@@ -169,6 +373,8 @@
     mount_point = "cache"
   elif image_filename == "vendor.img":
     mount_point = "vendor"
+  elif image_filename == "oem.img":
+    mount_point = "oem"
   else:
     print >> sys.stderr, "error: unknown image file name ", image_filename
     exit(1)
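
For context on the verity changes above: the partition has to hold the filesystem plus the dm-verity hash tree plus a verity metadata block, which is why AdjustPartitionSizeForVerity shrinks the size the filesystem is built against. The authoritative numbers come from the build_verity_tree and build_verity_metadata.py tools the script shells out to; the sketch below is only a back-of-the-envelope estimate, and the 4096-byte block size, 32-byte digest size, and 32 KiB metadata size are assumptions, not values taken from this patch.

    def estimate_verity_tree_size(partition_size, block_size=4096, digest_size=32):
        """Rough dm-verity hash tree size for a partition of the given size."""
        hashes_per_block = block_size // digest_size     # 128 digests per 4K block
        blocks = (partition_size + block_size - 1) // block_size
        tree_size = 0
        level = blocks
        while level > 1:
            # Each level stores one digest per block of the level below it.
            level = (level + hashes_per_block - 1) // hashes_per_block
            tree_size += level * block_size
        return tree_size

    partition_size = 512 * 1024 * 1024    # hypothetical 512 MiB system partition
    tree = estimate_verity_tree_size(partition_size)
    metadata = 32 * 1024                  # assumed size of the verity metadata block
    print("approx usable filesystem bytes: %d" % (partition_size - tree - metadata))
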
diff --git a/tools/releasetools/check_target_files_signatures b/tools/releasetools/check_target_files_signatures
index 45d30a6..b2f46c1 100755
--- a/tools/releasetools/check_target_files_signatures
+++ b/tools/releasetools/check_target_files_signatures
@@ -41,8 +41,8 @@
 
 import sys
 
-if sys.hexversion < 0x02040000:
-  print >> sys.stderr, "Python 2.4 or newer is required."
+if sys.hexversion < 0x02070000:
+  print >> sys.stderr, "Python 2.7 or newer is required."
   sys.exit(1)
 
 import os
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 7cd70f4..815c76c 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -29,6 +29,9 @@
 import time
 import zipfile
 
+import blockimgdiff
+from rangelib import *
+
 try:
   from hashlib import sha1 as sha1
 except ImportError:
@@ -40,7 +43,13 @@
 
 class Options(object): pass
 OPTIONS = Options()
-OPTIONS.search_path = "out/host/linux-x86"
+
+DEFAULT_SEARCH_PATH_BY_PLATFORM = {
+    "linux2": "out/host/linux-x86",
+    "darwin": "out/host/darwin-x86",
+    }
+OPTIONS.search_path = DEFAULT_SEARCH_PATH_BY_PLATFORM.get(sys.platform, None)
+
 OPTIONS.signapk_path = "framework/signapk.jar"  # Relative to search_path
 OPTIONS.extra_signapk_args = []
 OPTIONS.java_path = "java"  # Use the one on the path by default.
@@ -85,17 +94,24 @@
       pass
 
 
-def LoadInfoDict(zip):
+def LoadInfoDict(input):
   """Read and parse the META/misc_info.txt key/value pairs from the
   input target files and return a dict."""
 
+  def read_helper(fn):
+    if isinstance(input, zipfile.ZipFile):
+      return input.read(fn)
+    else:
+      path = os.path.join(input, *fn.split("/"))
+      try:
+        with open(path) as f:
+          return f.read()
+      except IOError, e:
+        if e.errno == errno.ENOENT:
+          raise KeyError(fn)
+        raise
   d = {}
   try:
-    for line in zip.read("META/misc_info.txt").split("\n"):
-      line = line.strip()
-      if not line or line.startswith("#"): continue
-      k, v = line.split("=", 1)
-      d[k] = v
+    d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
   except KeyError:
     # ok if misc_info.txt doesn't exist
     pass
@@ -106,20 +122,20 @@
 
   if "mkyaffs2_extra_flags" not in d:
     try:
-      d["mkyaffs2_extra_flags"] = zip.read("META/mkyaffs2-extra-flags.txt").strip()
+      d["mkyaffs2_extra_flags"] = read_helper("META/mkyaffs2-extra-flags.txt").strip()
     except KeyError:
       # ok if flags don't exist
       pass
 
   if "recovery_api_version" not in d:
     try:
-      d["recovery_api_version"] = zip.read("META/recovery-api-version.txt").strip()
+      d["recovery_api_version"] = read_helper("META/recovery-api-version.txt").strip()
     except KeyError:
       raise ValueError("can't find recovery API version in input target-files")
 
   if "tool_extensions" not in d:
     try:
-      d["tool_extensions"] = zip.read("META/tool-extensions.txt").strip()
+      d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
     except KeyError:
       # ok if extensions don't exist
       pass
@@ -128,7 +144,7 @@
     d["fstab_version"] = "1"
 
   try:
-    data = zip.read("META/imagesizes.txt")
+    data = read_helper("META/imagesizes.txt")
     for line in data.split("\n"):
       if not line: continue
       name, value = line.split(" ", 1)
@@ -147,39 +163,43 @@
   makeint("recovery_api_version")
   makeint("blocksize")
   makeint("system_size")
+  makeint("vendor_size")
   makeint("userdata_size")
   makeint("cache_size")
   makeint("recovery_size")
   makeint("boot_size")
   makeint("fstab_version")
 
-  d["fstab"] = LoadRecoveryFSTab(zip, d["fstab_version"])
-  d["build.prop"] = LoadBuildProp(zip)
+  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
+  d["build.prop"] = LoadBuildProp(read_helper)
   return d
 
-def LoadBuildProp(zip):
+def LoadBuildProp(read_helper):
   try:
-    data = zip.read("SYSTEM/build.prop")
+    data = read_helper("SYSTEM/build.prop")
   except KeyError:
-    print "Warning: could not find SYSTEM/build.prop in %s" % zip
+    print "Warning: could not find SYSTEM/build.prop"
     data = ""
+  return LoadDictionaryFromLines(data.split("\n"))
 
+def LoadDictionaryFromLines(lines):
   d = {}
-  for line in data.split("\n"):
+  for line in lines:
     line = line.strip()
     if not line or line.startswith("#"): continue
-    name, value = line.split("=", 1)
-    d[name] = value
+    if "=" in line:
+      name, value = line.split("=", 1)
+      d[name] = value
   return d
 
-def LoadRecoveryFSTab(zip, fstab_version):
+def LoadRecoveryFSTab(read_helper, fstab_version):
   class Partition(object):
     pass
 
   try:
-    data = zip.read("RECOVERY/RAMDISK/etc/recovery.fstab")
+    data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
   except KeyError:
-    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab in %s." % zip
+    print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
     data = ""
 
   if fstab_version == 1:
@@ -293,6 +313,11 @@
 
   cmd = [mkbootimg, "--kernel", os.path.join(sourcedir, "kernel")]
 
+  fn = os.path.join(sourcedir, "second")
+  if os.access(fn, os.F_OK):
+    cmd.append("--second")
+    cmd.append(fn)
+
   fn = os.path.join(sourcedir, "cmdline")
   if os.access(fn, os.F_OK):
     cmd.append("--cmdline")
@@ -320,6 +345,13 @@
   assert p.returncode == 0, "mkbootimg of %s image failed" % (
       os.path.basename(sourcedir),)
 
+  if info_dict.get("verity_key", None):
+    path = "/" + os.path.basename(sourcedir).lower()
+    cmd = ["boot_signer", path, img.name, info_dict["verity_key"], img.name]
+    p = Run(cmd, stdout=subprocess.PIPE)
+    p.communicate()
+    assert p.returncode == 0, "boot_signer of %s image failed" % path
+
   img.seek(os.SEEK_SET, 0)
   data = img.read()
 
@@ -333,22 +365,28 @@
                      info_dict=None):
   """Return a File object (with name 'name') with the desired bootable
   image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
-  'prebuilt_name', otherwise construct it from the source files in
+  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
+  otherwise construct it from the source files in
   'unpack_dir'/'tree_subdir'."""
 
   prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
   if os.path.exists(prebuilt_path):
-    print "using prebuilt %s..." % (prebuilt_name,)
+    print "using prebuilt %s from BOOTABLE_IMAGES..." % (prebuilt_name,)
     return File.FromLocalFile(name, prebuilt_path)
-  else:
-    print "building image from target_files %s..." % (tree_subdir,)
-    fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
-    data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
-                              os.path.join(unpack_dir, fs_config),
-                              info_dict)
-    if data:
-      return File(name, data)
-    return None
+
+  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
+  if os.path.exists(prebuilt_path):
+    print "using prebuilt %s from IMAGES..." % (prebuilt_name,)
+    return File.FromLocalFile(name, prebuilt_path)
+
+  print "building image from target_files %s..." % (tree_subdir,)
+  fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
+  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
+                            os.path.join(unpack_dir, fs_config),
+                            info_dict)
+  if data:
+    return File(name, data)
+  return None
 
 
 def UnzipTemp(filename, pattern=None):
@@ -620,12 +658,22 @@
       if extra_option_handler is None or not extra_option_handler(o, a):
         assert False, "unknown option \"%s\"" % (o,)
 
-  os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
-                        os.pathsep + os.environ["PATH"])
+  if OPTIONS.search_path:
+    os.environ["PATH"] = (os.path.join(OPTIONS.search_path, "bin") +
+                          os.pathsep + os.environ["PATH"])
 
   return args
 
 
+def MakeTempFile(prefix=None, suffix=None):
+  """Make a temp file and add it to the list of things to be deleted
+  when Cleanup() is called.  Return the filename."""
+  fd, fn = tempfile.mkstemp(prefix=prefix, suffix=suffix)
+  os.close(fd)
+  OPTIONS.tempfiles.append(fn)
+  return fn
+
+
 def Cleanup():
   for i in OPTIONS.tempfiles:
     if os.path.isdir(i):
@@ -733,11 +781,14 @@
     return result
 
 
-def ZipWriteStr(zip, filename, data, perms=0644):
+def ZipWriteStr(zip, filename, data, perms=0644, compression=None):
   # use a fixed timestamp so the output is repeatable.
   zinfo = zipfile.ZipInfo(filename=filename,
                           date_time=(2009, 1, 1, 0, 0, 0))
-  zinfo.compress_type = zip.compression
+  if compression is None:
+    zinfo.compress_type = zip.compression
+  else:
+    zinfo.compress_type = compression
   zinfo.external_attr = perms << 16
   zip.writestr(zinfo, data)
 
@@ -764,6 +815,7 @@
           if x == ".py":
             f = b
           info = imp.find_module(f, [d])
+        print "loaded device-specific extensions from", path
         self.module = imp.load_module("device_specific", *info)
       except ImportError:
         print "unable to load device-specific module; assuming none"
@@ -842,8 +894,8 @@
     t.flush()
     return t
 
-  def AddToZip(self, z):
-    ZipWriteStr(z, self.name, self.data)
+  def AddToZip(self, z, compression=None):
+    ZipWriteStr(z, self.name, self.data, compression=compression)
 
 DIFF_PROGRAM_BY_EXT = {
     ".gz" : "imgdiff",
@@ -888,10 +940,26 @@
       cmd.append(ttemp.name)
       cmd.append(ptemp.name)
       p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-      _, err = p.communicate()
+      err = []
+      def run():
+        _, e = p.communicate()
+        if e: err.append(e)
+      th = threading.Thread(target=run)
+      th.start()
+      th.join(timeout=300)   # 5 mins
+      if th.is_alive():
+        print "WARNING: diff command timed out"
+        p.terminate()
+        th.join(5)
+        if th.is_alive():
+          p.kill()
+          th.join()
+
       if err or p.returncode != 0:
-        print "WARNING: failure running %s:\n%s\n" % (diff_program, err)
-        return None
+        print "WARNING: failure running %s:\n%s\n" % (
+            diff_program, "".join(err))
+        self.patch = None
+        return None, None, None
       diff = ptemp.read()
     finally:
       ptemp.close()
@@ -955,9 +1023,80 @@
     threads.pop().join()
 
 
+class BlockDifference:
+  def __init__(self, partition, tgt, src=None, check_first_block=False):
+    self.tgt = tgt
+    self.src = src
+    self.partition = partition
+    self.check_first_block = check_first_block
+
+    b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads)
+    tmpdir = tempfile.mkdtemp()
+    OPTIONS.tempfiles.append(tmpdir)
+    self.path = os.path.join(tmpdir, partition)
+    b.Compute(self.path)
+
+    _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
+
+  def WriteScript(self, script, output_zip, progress=None):
+    if not self.src:
+      # write the output unconditionally
+      if progress: script.ShowProgress(progress, 0)
+      self._WriteUpdate(script, output_zip)
+
+    else:
+      if self.check_first_block:
+        self._CheckFirstBlock(script)
+
+      script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' %
+                         (self.device, self.src.care_map.to_string_raw(),
+                          self.src.TotalSha1()))
+      script.Print("Patching %s image..." % (self.partition,))
+      if progress: script.ShowProgress(progress, 0)
+      self._WriteUpdate(script, output_zip)
+      script.AppendExtra(('else\n'
+                          '  (range_sha1("%s", "%s") == "%s") ||\n'
+                          '  abort("%s partition has unexpected contents");\n'
+                          'endif;') %
+                         (self.device, self.tgt.care_map.to_string_raw(),
+                          self.tgt.TotalSha1(), self.partition))
+
+  def _WriteUpdate(self, script, output_zip):
+    partition = self.partition
+    with open(self.path + ".transfer.list", "rb") as f:
+      ZipWriteStr(output_zip, partition + ".transfer.list", f.read())
+    with open(self.path + ".new.dat", "rb") as f:
+      ZipWriteStr(output_zip, partition + ".new.dat", f.read())
+    with open(self.path + ".patch.dat", "rb") as f:
+      ZipWriteStr(output_zip, partition + ".patch.dat", f.read(),
+                         compression=zipfile.ZIP_STORED)
+
+    call = (('block_image_update("%s", '
+             'package_extract_file("%s.transfer.list"), '
+             '"%s.new.dat", "%s.patch.dat");\n') %
+            (self.device, partition, partition, partition))
+    script.AppendExtra(script._WordWrap(call))
+
+  def _CheckFirstBlock(self, script):
+    r = RangeSet((0, 1))
+    h = sha1()
+    for data in self.src.ReadRangeSet(r):
+      h.update(data)
+    h = h.hexdigest()
+
+    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
+                        'abort("%s has been remounted R/W; '
+                        'reflash device to reenable OTA updates");')
+                       % (self.device, r.to_string_raw(), h, self.device))
+
+
+DataImage = blockimgdiff.DataImage
+
+
 # map recovery.fstab's fs_types to mount/format "partition types"
 PARTITION_TYPES = { "yaffs2": "MTD", "mtd": "MTD",
-                    "ext4": "EMMC", "emmc": "EMMC" }
+                    "ext4": "EMMC", "emmc": "EMMC",
+                    "f2fs": "EMMC" }
 
 def GetTypeAndDevice(mount_point, info):
   fstab = info["fstab"]
@@ -980,3 +1119,76 @@
       save = True
   cert = "".join(cert).decode('base64')
   return cert
+
+def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
+                      info_dict=None):
+  """Generate a binary patch that creates the recovery image starting
+  with the boot image.  (Most of the space in these images is just the
+  kernel, which is identical for the two, so the resulting patch
+  should be efficient.)  Add it to the output zip, along with a shell
+  script that is run from init.rc on first boot to actually do the
+  patching and install the new recovery image.
+
+  recovery_img and boot_img should be File objects for the
+  corresponding images.  info_dict should be the dictionary returned by
+  common.LoadInfoDict() on the input target_files; if None,
+  OPTIONS.info_dict is used.
+  """
+
+  if info_dict is None:
+    info_dict = OPTIONS.info_dict
+
+  diff_program = ["imgdiff"]
+  path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
+  if os.path.exists(path):
+    diff_program.append("-b")
+    diff_program.append(path)
+    bonus_args = "-b /system/etc/recovery-resource.dat"
+  else:
+    bonus_args = ""
+
+  d = Difference(recovery_img, boot_img, diff_program=diff_program)
+  _, _, patch = d.ComputePatch()
+  output_sink("recovery-from-boot.p", patch)
+
+  td_pair = GetTypeAndDevice("/boot", info_dict)
+  if not td_pair:
+    return
+  boot_type, boot_device = td_pair
+  td_pair = GetTypeAndDevice("/recovery", info_dict)
+  if not td_pair:
+    return
+  recovery_type, recovery_device = td_pair
+
+  sh = """#!/system/bin/sh
+if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
+  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
+else
+  log -t recovery "Recovery image already installed"
+fi
+""" % { 'boot_size': boot_img.size,
+        'boot_sha1': boot_img.sha1,
+        'recovery_size': recovery_img.size,
+        'recovery_sha1': recovery_img.sha1,
+        'boot_type': boot_type,
+        'boot_device': boot_device,
+        'recovery_type': recovery_type,
+        'recovery_device': recovery_device,
+        'bonus_args': bonus_args,
+        }
+
+  # The install script location moved from /system/etc to /system/bin
+  # in the L release.  Parse the init.rc file to find out where the
+  # target-files expects it to be, and put it there.
+  sh_location = "etc/install-recovery.sh"
+  try:
+    with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f:
+      for line in f:
+        m = re.match("^service flash_recovery /system/(\S+)\s*$", line)
+        if m:
+          sh_location = m.group(1)
+          print "putting script in", sh_location
+          break
+  except (OSError, IOError), e:
+    print "failed to read init.rc: %s" % (e,)
+
+  output_sink(sh_location, sh)
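
A small usage sketch for the LoadDictionaryFromLines helper factored out above: it ignores blank lines, comment lines, and lines without an equals sign, which is what lets the same parser handle both misc_info.txt and build.prop. The property values below are made up, and the snippet assumes it is run from the releasetools directory so that importing common picks up this module.

    import common

    lines = [
        "# begin build properties",
        "ro.build.id=KTU84L",
        "ro.build.date.utc=1402610892",
        "a line with no equals sign is skipped",
        "",
    ]
    props = common.LoadDictionaryFromLines(lines)
    print(props["ro.build.date.utc"])    # prints 1402610892
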
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 426b713..7d318a3 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -68,19 +68,43 @@
     with temporary=True) to this one."""
     self.script.extend(other.script)
 
+  def AssertOemProperty(self, name, value):
+    """Assert that a property on the OEM partition matches a value."""
+    if not name:
+      raise ValueError("must specify an OEM property")
+    if not value:
+      raise ValueError("must specify the OEM value")
+    cmd = ('file_getprop("/oem/oem.prop", "%s") == "%s" || '
+           'abort("This package expects the value \\"%s\\" for '
+           '\\"%s\\" on the OEM partition; '
+           'this has value \\"" + file_getprop("/oem/oem.prop", "%s") + "\\".");'
+           ) % (name, value, value, name, name)
+    self.script.append(cmd)
+
   def AssertSomeFingerprint(self, *fp):
-    """Assert that the current system build fingerprint is one of *fp."""
+    """Assert that the current recovery build fingerprint is one of *fp."""
     if not fp:
       raise ValueError("must specify some fingerprints")
     cmd = (
-           ' ||\n    '.join([('file_getprop("/system/build.prop", '
-                         '"ro.build.fingerprint") == "%s"')
+           ' ||\n    '.join([('getprop("ro.build.fingerprint") == "%s"')
                         % i for i in fp]) +
            ' ||\n    abort("Package expects build fingerprint of %s; this '
            'device has " + getprop("ro.build.fingerprint") + ".");'
            ) % (" or ".join(fp),)
     self.script.append(cmd)
 
+  def AssertSomeThumbprint(self, *fp):
+    """Assert that the current recovery build thumbprint is one of *fp."""
+    if not fp:
+      raise ValueError("must specify some thumbprints")
+    cmd = (
+           ' ||\n    '.join([('getprop("ro.build.thumbprint") == "%s"')
+                        % i for i in fp]) +
+           ' ||\n    abort("Package expects build thumbprint of %s; this '
+           'device has " + getprop("ro.build.thumbprint") + ".");'
+           ) % (" or ".join(fp),)
+    self.script.append(cmd)
+
   def AssertOlderBuild(self, timestamp, timestamp_text):
     """Assert that the build on the device is older (or the same as)
     the given timestamp."""
@@ -140,14 +164,25 @@
     self.script.append(('apply_patch_space(%d) || abort("Not enough free space '
                         'on /system to apply patches.");') % (amount,))
 
-  def Mount(self, mount_point):
-    """Mount the partition with the given mount_point."""
+  def Mount(self, mount_point, mount_options_by_format=""):
+    """Mount the partition with the given mount_point.
+      mount_options_by_format:
+      [fs_type=option[,option]...[|fs_type=option[,option]...]...]
+      where option is optname[=optvalue]
+      E.g. ext4=barrier=1,nodelalloc,errors=panic|f2fs=errors=recover
+    """
     fstab = self.info.get("fstab", None)
     if fstab:
       p = fstab[mount_point]
-      self.script.append('mount("%s", "%s", "%s", "%s");' %
+      mount_dict = {}
+      if mount_options_by_format is not None:
+        for option in mount_options_by_format.split("|"):
+          if "=" in option:
+            key, value = option.split("=", 1)
+            mount_dict[key] = value
+      self.script.append('mount("%s", "%s", "%s", "%s", "%s");' %
                          (p.fs_type, common.PARTITION_TYPES[p.fs_type],
-                          p.device, p.mount_point))
+                          p.device, p.mount_point, mount_dict.get(p.fs_type, "")))
       self.mounts.add(p.mount_point)
 
   def UnpackPackageDir(self, src, dst):
@@ -178,6 +213,15 @@
                          (p.fs_type, common.PARTITION_TYPES[p.fs_type],
                           p.device, p.length, p.mount_point))
 
+  def WipeBlockDevice(self, partition):
+    if partition not in ("/system", "/vendor"):
+      raise ValueError(("WipeBlockDevice doesn't work on %s\n") % (partition,))
+    fstab = self.info.get("fstab", None)
+    size = self.info.get(partition.lstrip("/") + "_size", None)
+    device = fstab[partition].device
+
+    self.script.append('wipe_block_device("%s", %s);' % (device, size))
+
   def DeleteFiles(self, file_list):
     """Delete all files in file_list."""
     if not file_list: return
@@ -212,7 +256,7 @@
     cmd = "".join(cmd)
     self.script.append(self._WordWrap(cmd))
 
-  def WriteRawImage(self, mount_point, fn):
+  def WriteRawImage(self, mount_point, fn, mapfn=None):
     """Write the given package file into the partition for the given
     mount point."""
 
@@ -226,8 +270,13 @@
             'write_raw_image(package_extract_file("%(fn)s"), "%(device)s");'
             % args)
       elif partition_type == "EMMC":
-        self.script.append(
-            'package_extract_file("%(fn)s", "%(device)s");' % args)
+        if mapfn:
+          args["map"] = mapfn
+          self.script.append(
+              'package_extract_file("%(fn)s", "%(device)s", "%(map)s");' % args)
+        else:
+          self.script.append(
+              'package_extract_file("%(fn)s", "%(device)s");' % args)
       else:
         raise ValueError("don't know how to write \"%s\" partitions" % (p.fs_type,))
 
@@ -274,6 +323,10 @@
     """Append text verbatim to the output script."""
     self.script.append(extra)
 
+  def Unmount(self, mount_point):
+    self.script.append('unmount("%s");' % (mount_point,))
+    self.mounts.remove(mount_point)
+
   def UnmountAll(self):
     for p in sorted(self.mounts):
       self.script.append('unmount("%s");' % (p,))
@@ -293,6 +346,6 @@
     if input_path is None:
       data = input_zip.read("OTA/bin/updater")
     else:
-      data = open(os.path.join(input_path, "updater")).read()
+      data = open(input_path, "rb").read()
     common.ZipWriteStr(output_zip, "META-INF/com/google/android/update-binary",
                        data, perms=0755)
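
The mount_options_by_format argument accepted by Mount() above packs per-filesystem mount options into a single string. The standalone snippet below parses the docstring's example value the same way the method does, so you can see which options end up attached to which fs_type:

    opts = "ext4=barrier=1,nodelalloc,errors=panic|f2fs=errors=recover"
    mount_dict = {}
    for option in opts.split("|"):
        if "=" in option:
            key, value = option.split("=", 1)
            mount_dict[key] = value

    print(mount_dict.get("ext4", ""))    # barrier=1,nodelalloc,errors=panic
    print(mount_dict.get("f2fs", ""))    # errors=recover
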
diff --git a/tools/releasetools/img_from_target_files b/tools/releasetools/img_from_target_files
deleted file mode 100755
index d23d465..0000000
--- a/tools/releasetools/img_from_target_files
+++ /dev/null
@@ -1,273 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2008 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Given a target-files zipfile, produces an image zipfile suitable for
-use with 'fastboot update'.
-
-Usage:  img_from_target_files [flags] input_target_files output_image_zip
-
-  -b  (--board_config)  <file>
-      Deprecated.
-
-  -z  (--bootable_zip)
-      Include only the bootable images (eg 'boot' and 'recovery') in
-      the output.
-
-"""
-
-import sys
-
-if sys.hexversion < 0x02040000:
-  print >> sys.stderr, "Python 2.4 or newer is required."
-  sys.exit(1)
-
-import errno
-import os
-import re
-import shutil
-import subprocess
-import tempfile
-import zipfile
-
-# missing in Python 2.4 and before
-if not hasattr(os, "SEEK_SET"):
-  os.SEEK_SET = 0
-
-import build_image
-import common
-
-OPTIONS = common.OPTIONS
-
-
-def AddSystem(output_zip):
-  """Turn the contents of SYSTEM into a system image and store it in
-  output_zip."""
-
-  print "creating system.img..."
-
-  img = tempfile.NamedTemporaryFile()
-
-  # The name of the directory it is making an image out of matters to
-  # mkyaffs2image.  It wants "system" but we have a directory named
-  # "SYSTEM", so create a symlink.
-  try:
-    os.symlink(os.path.join(OPTIONS.input_tmp, "SYSTEM"),
-               os.path.join(OPTIONS.input_tmp, "system"))
-  except OSError, e:
-      # bogus error on my mac version?
-      #   File "./build/tools/releasetools/img_from_target_files", line 86, in AddSystem
-      #     os.path.join(OPTIONS.input_tmp, "system"))
-      # OSError: [Errno 17] File exists
-    if (e.errno == errno.EEXIST):
-      pass
-
-  image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
-                                                    "system")
-  fstab = OPTIONS.info_dict["fstab"]
-  if fstab:
-    image_props["fs_type" ] = fstab["/system"].fs_type
-  succ = build_image.BuildImage(os.path.join(OPTIONS.input_tmp, "system"),
-                                image_props, img.name)
-  assert succ, "build system.img image failed"
-
-  img.seek(os.SEEK_SET, 0)
-  data = img.read()
-  img.close()
-
-  common.CheckSize(data, "system.img", OPTIONS.info_dict)
-  common.ZipWriteStr(output_zip, "system.img", data)
-
-
-def AddVendor(output_zip):
-  """Turn the contents of VENDOR into vendor.img and store it in
-  output_zip."""
-
-  image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
-                                                    "vendor")
-  # The build system has to explicitly request for vendor.img.
-  if "fs_type" not in image_props:
-    return
-
-  print "creating vendor.img..."
-
-  img = tempfile.NamedTemporaryFile()
-
-  # The name of the directory it is making an image out of matters to
-  # mkyaffs2image.  It wants "vendor" but we have a directory named
-  # "VENDOR", so create a symlink or an empty directory if VENDOR does not
-  # exist.
-  if not os.path.exists(os.path.join(OPTIONS.input_tmp, "vendor")):
-    if os.path.exists(os.path.join(OPTIONS.input_tmp, "VENDOR")):
-      os.symlink(os.path.join(OPTIONS.input_tmp, "VENDOR"),
-                 os.path.join(OPTIONS.input_tmp, "vendor"))
-    else:
-      os.mkdir(os.path.join(OPTIONS.input_tmp, "vendor"))
-
-  img = tempfile.NamedTemporaryFile()
-
-  fstab = OPTIONS.info_dict["fstab"]
-  if fstab:
-    image_props["fs_type" ] = fstab["/vendor"].fs_type
-  succ = build_image.BuildImage(os.path.join(OPTIONS.input_tmp, "vendor"),
-                                image_props, img.name)
-  assert succ, "build vendor.img image failed"
-
-  common.CheckSize(img.name, "vendor.img", OPTIONS.info_dict)
-  output_zip.write(img.name, "vendor.img")
-  img.close()
-
-
-def AddUserdata(output_zip):
-  """Create an empty userdata image and store it in output_zip."""
-
-  image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
-                                                    "data")
-  # If no userdata_size is provided for extfs, skip userdata.img.
-  if (image_props.get("fs_type", "").startswith("ext") and
-      not image_props.get("partition_size")):
-    return
-
-  print "creating userdata.img..."
-
-  # The name of the directory it is making an image out of matters to
-  # mkyaffs2image.  So we create a temp dir, and within it we create an
-  # empty dir named "data", and build the image from that.
-  temp_dir = tempfile.mkdtemp()
-  user_dir = os.path.join(temp_dir, "data")
-  os.mkdir(user_dir)
-  img = tempfile.NamedTemporaryFile()
-
-  fstab = OPTIONS.info_dict["fstab"]
-  if fstab:
-    image_props["fs_type" ] = fstab["/data"].fs_type
-  succ = build_image.BuildImage(user_dir, image_props, img.name)
-  assert succ, "build userdata.img image failed"
-
-  common.CheckSize(img.name, "userdata.img", OPTIONS.info_dict)
-  output_zip.write(img.name, "userdata.img")
-  img.close()
-  os.rmdir(user_dir)
-  os.rmdir(temp_dir)
-
-
-def AddCache(output_zip):
-  """Create an empty cache image and store it in output_zip."""
-
-  image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
-                                                    "cache")
-  # The build system has to explicitly request for cache.img.
-  if "fs_type" not in image_props:
-    return
-
-  print "creating cache.img..."
-
-  # The name of the directory it is making an image out of matters to
-  # mkyaffs2image.  So we create a temp dir, and within it we create an
-  # empty dir named "cache", and build the image from that.
-  temp_dir = tempfile.mkdtemp()
-  user_dir = os.path.join(temp_dir, "cache")
-  os.mkdir(user_dir)
-  img = tempfile.NamedTemporaryFile()
-
-  fstab = OPTIONS.info_dict["fstab"]
-  if fstab:
-    image_props["fs_type" ] = fstab["/cache"].fs_type
-  succ = build_image.BuildImage(user_dir, image_props, img.name)
-  assert succ, "build cache.img image failed"
-
-  common.CheckSize(img.name, "cache.img", OPTIONS.info_dict)
-  output_zip.write(img.name, "cache.img")
-  img.close()
-  os.rmdir(user_dir)
-  os.rmdir(temp_dir)
-
-
-def CopyInfo(output_zip):
-  """Copy the android-info.txt file from the input to the output."""
-  output_zip.write(os.path.join(OPTIONS.input_tmp, "OTA", "android-info.txt"),
-                   "android-info.txt")
-
-
-def main(argv):
-  bootable_only = [False]
-
-  def option_handler(o, a):
-    if o in ("-b", "--board_config"):
-      pass       # deprecated
-    if o in ("-z", "--bootable_zip"):
-      bootable_only[0] = True
-    else:
-      return False
-    return True
-
-  args = common.ParseOptions(argv, __doc__,
-                             extra_opts="b:z",
-                             extra_long_opts=["board_config=",
-                                              "bootable_zip"],
-                             extra_option_handler=option_handler)
-
-  bootable_only = bootable_only[0]
-
-  if len(args) != 2:
-    common.Usage(__doc__)
-    sys.exit(1)
-
-  OPTIONS.input_tmp, input_zip = common.UnzipTemp(args[0])
-  OPTIONS.info_dict = common.LoadInfoDict(input_zip)
-
-  # If this image was originally labelled with SELinux contexts, make sure we
-  # also apply the labels in our new image. During building, the "file_contexts"
-  # is in the out/ directory tree, but for repacking from target-files.zip it's
-  # in the root directory of the ramdisk.
-  if "selinux_fc" in OPTIONS.info_dict:
-    OPTIONS.info_dict["selinux_fc"] = os.path.join(OPTIONS.input_tmp, "BOOT", "RAMDISK",
-        "file_contexts")
-
-  output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
-
-  boot_image = common.GetBootableImage(
-      "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
-  if boot_image:
-    boot_image.AddToZip(output_zip)
-  recovery_image = common.GetBootableImage(
-      "recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
-  if recovery_image:
-    recovery_image.AddToZip(output_zip)
-
-  if not bootable_only:
-    AddSystem(output_zip)
-    AddVendor(output_zip)
-    AddUserdata(output_zip)
-    AddCache(output_zip)
-    CopyInfo(output_zip)
-
-  print "cleaning up..."
-  output_zip.close()
-  shutil.rmtree(OPTIONS.input_tmp)
-
-  print "done."
-
-
-if __name__ == '__main__':
-  try:
-    common.CloseInheritedPipes()
-    main(sys.argv[1:])
-  except common.ExternalError, e:
-    print
-    print "   ERROR: %s" % (e,)
-    print
-    sys.exit(1)
diff --git a/tools/releasetools/img_from_target_files b/tools/releasetools/img_from_target_files
new file mode 120000
index 0000000..afaf24b
--- /dev/null
+++ b/tools/releasetools/img_from_target_files
@@ -0,0 +1 @@
+img_from_target_files.py
\ No newline at end of file
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
new file mode 100755
index 0000000..4b88e73
--- /dev/null
+++ b/tools/releasetools/img_from_target_files.py
@@ -0,0 +1,156 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2008 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Given a target-files zipfile, produces an image zipfile suitable for
+use with 'fastboot update'.
+
+Usage:  img_from_target_files [flags] input_target_files output_image_zip
+
+  -z  (--bootable_zip)
+      Include only the bootable images (eg 'boot' and 'recovery') in
+      the output.
+
+"""
+
+import sys
+
+if sys.hexversion < 0x02070000:
+  print >> sys.stderr, "Python 2.7 or newer is required."
+  sys.exit(1)
+
+import errno
+import os
+import re
+import shutil
+import subprocess
+import tempfile
+import zipfile
+
+# missing in Python 2.4 and before
+if not hasattr(os, "SEEK_SET"):
+  os.SEEK_SET = 0
+
+import common
+
+OPTIONS = common.OPTIONS
+
+
+def CopyInfo(output_zip):
+  """Copy the android-info.txt file from the input to the output."""
+  output_zip.write(os.path.join(OPTIONS.input_tmp, "OTA", "android-info.txt"),
+                   "android-info.txt")
+
+
+def main(argv):
+  bootable_only = [False]
+
+  def option_handler(o, a):
+    if o in ("-z", "--bootable_zip"):
+      bootable_only[0] = True
+    else:
+      return False
+    return True
+
+  args = common.ParseOptions(argv, __doc__,
+                             extra_opts="z",
+                             extra_long_opts=["bootable_zip"],
+                             extra_option_handler=option_handler)
+
+  bootable_only = bootable_only[0]
+
+  if len(args) != 2:
+    common.Usage(__doc__)
+    sys.exit(1)
+
+  OPTIONS.input_tmp, input_zip = common.UnzipTemp(args[0])
+  output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
+  CopyInfo(output_zip)
+
+  try:
+    done = False
+    images_path = os.path.join(OPTIONS.input_tmp, "IMAGES")
+    if os.path.exists(images_path):
+      # If this is a new target-files, it already contains the images,
+      # and all we have to do is copy them to the output zip.
+      images = os.listdir(images_path)
+      if images:
+        for i in images:
+          if bootable_only and i not in ("boot.img", "recovery.img"): continue
+          if not i.endswith(".img"): continue
+          with open(os.path.join(images_path, i), "rb") as f:
+            common.ZipWriteStr(output_zip, i, f.read())
+        done = True
+
+    if not done:
+      # We have an old target-files that doesn't already contain the
+      # images, so build them.
+      import add_img_to_target_files
+
+      OPTIONS.info_dict = common.LoadInfoDict(input_zip)
+
+      # If this image was originally labelled with SELinux contexts,
+      # make sure we also apply the labels in our new image. During
+      # building, the "file_contexts" is in the out/ directory tree,
+      # but for repacking from target-files.zip it's in the root
+      # directory of the ramdisk.
+      if "selinux_fc" in OPTIONS.info_dict:
+        OPTIONS.info_dict["selinux_fc"] = os.path.join(
+            OPTIONS.input_tmp, "BOOT", "RAMDISK", "file_contexts")
+
+      boot_image = common.GetBootableImage(
+          "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
+      if boot_image:
+        boot_image.AddToZip(output_zip)
+      recovery_image = common.GetBootableImage(
+          "recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
+      if recovery_image:
+        recovery_image.AddToZip(output_zip)
+
+      def banner(s):
+        print "\n\n++++ " + s + " ++++\n\n"
+
+      if not bootable_only:
+        banner("AddSystem")
+        add_img_to_target_files.AddSystem(output_zip, prefix="")
+        try:
+          input_zip.getinfo("VENDOR/")
+          banner("AddVendor")
+          add_img_to_target_files.AddVendor(output_zip, prefix="")
+        except KeyError:
+          pass   # no vendor partition for this device
+        banner("AddUserdata")
+        add_img_to_target_files.AddUserdata(output_zip, prefix="")
+        banner("AddCache")
+        add_img_to_target_files.AddCache(output_zip, prefix="")
+
+  finally:
+    print "cleaning up..."
+    output_zip.close()
+    shutil.rmtree(OPTIONS.input_tmp)
+
+  print "done."
+
+
+if __name__ == '__main__':
+  try:
+    common.CloseInheritedPipes()
+    main(sys.argv[1:])
+  except common.ExternalError, e:
+    print
+    print "   ERROR: %s" % (e,)
+    print
+    sys.exit(1)
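
One idiom worth calling out in the new img_from_target_files.py: main() keeps the -z flag in a one-element list (bootable_only = [False]) because Python 2 has no nonlocal statement, so the nested option_handler can mutate, but not rebind, a variable from the enclosing scope. A stripped-down illustration of the same pattern (function and flag names here are illustrative only):

    def parse_flags(argv):
        bootable_only = [False]            # mutable cell the closure can update

        def option_handler(opt):
            if opt in ("-z", "--bootable_zip"):
                bootable_only[0] = True
                return True
            return False

        for opt in argv:
            option_handler(opt)
        return bootable_only[0]

    print(parse_flags(["-z"]))             # True
    print(parse_flags([]))                 # False
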
diff --git a/tools/releasetools/make_recovery_patch b/tools/releasetools/make_recovery_patch
new file mode 100755
index 0000000..08d1450
--- /dev/null
+++ b/tools/releasetools/make_recovery_patch
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+if sys.hexversion < 0x02070000:
+  print >> sys.stderr, "Python 2.7 or newer is required."
+  sys.exit(1)
+
+import os
+import common
+
+OPTIONS = common.OPTIONS
+
+def main(argv):
+  # def option_handler(o, a):
+  #   return False
+
+  args = common.ParseOptions(argv, __doc__)
+  input_dir, output_dir = args
+
+  OPTIONS.info_dict = common.LoadInfoDict(input_dir)
+
+  recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
+                                         input_dir, "RECOVERY")
+  boot_img = common.GetBootableImage("boot.img", "boot.img",
+                                     input_dir, "BOOT")
+
+  if not recovery_img or not boot_img:
+    sys.exit(0)
+
+  def output_sink(fn, data):
+    with open(os.path.join(output_dir, "SYSTEM", *fn.split("/")), "wb") as f:
+      f.write(data)
+
+  common.MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img)
+
+
+if __name__ == '__main__':
+  main(sys.argv[1:])
diff --git a/tools/releasetools/ota_from_target_files b/tools/releasetools/ota_from_target_files
index e695218..755e5c2 100755
--- a/tools/releasetools/ota_from_target_files
+++ b/tools/releasetools/ota_from_target_files
@@ -21,7 +21,7 @@
 
 Usage:  ota_from_target_files [flags] input_target_files output_ota_package
 
-  -b  (--board_config)  <file>
+  --board_config  <file>
       Deprecated.
 
   -k (--package_key) <key> Key to use to sign the package (default is
@@ -37,6 +37,14 @@
       Generate an incremental OTA using the given target-files zip as
       the starting build.
 
+  -v  (--verify)
+      Remount and verify the checksums of the files written to the
+      system and vendor (if used) partitions.  Incremental builds only.
+
+  -o  (--oem_settings)  <file>
+      Use the file to specify the expected OEM-specific properties
+      on the OEM partition of the intended device.
+
   -w  (--wipe_user_data)
       Generate an OTA package that will wipe the user data partition
       when installed.
@@ -57,6 +65,16 @@
       first, so that any changes made to the system partition are done
       using the new recovery (new kernel, etc.).
 
+  --block
+      Generate a block-based OTA if possible.  Will fall back to a
+      file-based OTA if the target_files is older and doesn't support
+      block-based OTAs.
+
+  -b  (--binary)  <file>
+      Use the given binary as the update-binary in the output package,
+      instead of the binary in the build's target_files.  Use for
+      development only.
+
   -t  (--worker_threads) <int>
       Specifies the number of worker-threads that will be used when
       generating patches for incremental updates (defaults to 3).
@@ -65,12 +83,13 @@
 
 import sys
 
-if sys.hexversion < 0x02040000:
-  print >> sys.stderr, "Python 2.4 or newer is required."
+if sys.hexversion < 0x02070000:
+  print >> sys.stderr, "Python 2.7 or newer is required."
   sys.exit(1)
 
 import copy
 import errno
+import multiprocessing
 import os
 import re
 import subprocess
@@ -78,17 +97,18 @@
 import time
 import zipfile
 
-try:
-  from hashlib import sha1 as sha1
-except ImportError:
-  from sha import sha as sha1
+from hashlib import sha1 as sha1
 
 import common
 import edify_generator
+import build_image
+import blockimgdiff
+import sparse_img
 
 OPTIONS = common.OPTIONS
 OPTIONS.package_key = None
 OPTIONS.incremental_source = None
+OPTIONS.verify = False
 OPTIONS.require_verbatim = set()
 OPTIONS.prohibit_verbatim = set(("system/build.prop",))
 OPTIONS.patch_threshold = 0.95
@@ -96,9 +116,15 @@
 OPTIONS.omit_prereq = False
 OPTIONS.extra_script = None
 OPTIONS.aslr_mode = True
-OPTIONS.worker_threads = 3
+OPTIONS.worker_threads = multiprocessing.cpu_count() // 2
+if OPTIONS.worker_threads == 0:
+  OPTIONS.worker_threads = 1
 OPTIONS.two_step = False
 OPTIONS.no_signing = False
+OPTIONS.block_based = False
+OPTIONS.updater_binary = None
+OPTIONS.oem_source = None
+OPTIONS.fallback_to_full = True
 
 def MostPopularKey(d, default):
   """Given a dict, return the key corresponding to the largest
@@ -144,50 +170,21 @@
     return result
   return None
 
-class Item:
-  """Items represent the metadata (user, group, mode) of files and
-  directories in the system image."""
-  ITEMS = {}
-  def __init__(self, name, dir=False):
-    self.name = name
-    self.uid = None
-    self.gid = None
-    self.mode = None
-    self.selabel = None
-    self.capabilities = None
-    self.dir = dir
+class ItemSet:
+  def __init__(self, partition, fs_config):
+    self.partition = partition
+    self.fs_config = fs_config
+    self.ITEMS = {}
 
-    if name:
-      self.parent = Item.Get(os.path.dirname(name), dir=True)
-      self.parent.children.append(self)
-    else:
-      self.parent = None
-    if dir:
-      self.children = []
+  def Get(self, name, dir=False):
+    if name not in self.ITEMS:
+      self.ITEMS[name] = Item(self, name, dir=dir)
+    return self.ITEMS[name]
 
-  def Dump(self, indent=0):
-    if self.uid is not None:
-      print "%s%s %d %d %o" % ("  "*indent, self.name, self.uid, self.gid, self.mode)
-    else:
-      print "%s%s %s %s %s" % ("  "*indent, self.name, self.uid, self.gid, self.mode)
-    if self.dir:
-      print "%s%s" % ("  "*indent, self.descendants)
-      print "%s%s" % ("  "*indent, self.best_subtree)
-      for i in self.children:
-        i.Dump(indent=indent+1)
-
-  @classmethod
-  def Get(cls, name, dir=False):
-    if name not in cls.ITEMS:
-      cls.ITEMS[name] = Item(name, dir=dir)
-    return cls.ITEMS[name]
-
-  @classmethod
-  def GetMetadata(cls, input_zip):
-
+  def GetMetadata(self, input_zip):
     # The target_files contains a record of what the uid,
     # gid, and mode are supposed to be.
-    output = input_zip.read("META/filesystem_config.txt")
+    output = input_zip.read(self.fs_config)
 
     for line in output.split("\n"):
       if not line: continue
@@ -205,7 +202,7 @@
         if key == "capabilities":
           capabilities = value
 
-      i = cls.ITEMS.get(name, None)
+      i = self.ITEMS.get(name, None)
       if i is not None:
         i.uid = int(uid)
         i.gid = int(gid)
@@ -216,11 +213,44 @@
           i.children.sort(key=lambda i: i.name)
 
     # set metadata for the files generated by this script.
-    i = cls.ITEMS.get("system/recovery-from-boot.p", None)
+    i = self.ITEMS.get("system/recovery-from-boot.p", None)
     if i: i.uid, i.gid, i.mode, i.selabel, i.capabilities = 0, 0, 0644, None, None
-    i = cls.ITEMS.get("system/etc/install-recovery.sh", None)
+    i = self.ITEMS.get("system/etc/install-recovery.sh", None)
     if i: i.uid, i.gid, i.mode, i.selabel, i.capabilities = 0, 0, 0544, None, None
 
+
+class Item:
+  """Items represent the metadata (user, group, mode) of files and
+  directories in the system image."""
+  def __init__(self, itemset, name, dir=False):
+    self.itemset = itemset
+    self.name = name
+    self.uid = None
+    self.gid = None
+    self.mode = None
+    self.selabel = None
+    self.capabilities = None
+    self.dir = dir
+
+    if name:
+      self.parent = itemset.Get(os.path.dirname(name), dir=True)
+      self.parent.children.append(self)
+    else:
+      self.parent = None
+    if dir:
+      self.children = []
+
+  def Dump(self, indent=0):
+    if self.uid is not None:
+      print "%s%s %d %d %o" % ("  "*indent, self.name, self.uid, self.gid, self.mode)
+    else:
+      print "%s%s %s %s %s" % ("  "*indent, self.name, self.uid, self.gid, self.mode)
+    if self.dir:
+      print "%s%s" % ("  "*indent, self.descendants)
+      print "%s%s" % ("  "*indent, self.best_subtree)
+      for i in self.children:
+        i.Dump(indent=indent+1)
+
   def CountChildMetadata(self):
     """Count up the (uid, gid, mode, selabel, capabilities) tuples for
     all children and determine the best strategy for using set_perm_recursive and
@@ -305,9 +335,8 @@
     recurse(self, (-1, -1, -1, -1, None, None))
 
 
-def CopySystemFiles(input_zip, output_zip=None,
-                    substitute=None):
-  """Copies files underneath system/ in the input zip to the output
+def CopyPartitionFiles(itemset, input_zip, output_zip=None, substitute=None):
+  """Copies files for the partition in the input zip to the output
   zip.  Populates the Item class with their metadata, and returns a
   list of symlinks.  output_zip may be None, in which case the copy is
   skipped (but the other side effects still happen).  substitute is an
@@ -317,15 +346,17 @@
 
   symlinks = []
 
+  partition = itemset.partition
+
   for info in input_zip.infolist():
-    if info.filename.startswith("SYSTEM/"):
+    if info.filename.startswith(partition.upper() + "/"):
       basefilename = info.filename[7:]
       if IsSymlink(info):
         symlinks.append((input_zip.read(info.filename),
-                         "/system/" + basefilename))
+                         "/" + partition + "/" + basefilename))
       else:
         info2 = copy.copy(info)
-        fn = info2.filename = "system/" + basefilename
+        fn = info2.filename = partition + "/" + basefilename
         if substitute and fn in substitute and substitute[fn] is None:
           continue
         if output_zip is not None:
@@ -335,9 +366,9 @@
             data = input_zip.read(info.filename)
           output_zip.writestr(info2, data)
         if fn.endswith("/"):
-          Item.Get(fn[:-1], dir=True)
+          itemset.Get(fn[:-1], dir=True)
         else:
-          Item.Get(fn, dir=False)
+          itemset.Get(fn, dir=False)
 
   symlinks.sort()
   return symlinks
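
Editorial note on the generalization above: CopyPartitionFiles (and LoadPartitionFiles further down) strips the archive prefix with info.filename[7:], which is correct only because both prefixes this change handles are exactly seven characters long. A tiny sketch of that assumption (not part of the change):

    # Both archive prefixes handled by this change happen to be 7 characters,
    # so the fixed [7:] slice yields the partition-relative path.
    for prefix in ("SYSTEM/", "VENDOR/"):
      assert len(prefix) == 7
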
@@ -351,63 +382,82 @@
                   whole_file=True)
 
 
-def AppendAssertions(script, info_dict):
-  device = GetBuildProp("ro.product.device", info_dict)
-  script.AssertDevice(device)
-
-
-def MakeRecoveryPatch(input_tmp, output_zip, recovery_img, boot_img):
-  """Generate a binary patch that creates the recovery image starting
-  with the boot image.  (Most of the space in these images is just the
-  kernel, which is identical for the two, so the resulting patch
-  should be efficient.)  Add it to the output zip, along with a shell
-  script that is run from init.rc on first boot to actually do the
-  patching and install the new recovery image.
-
-  recovery_img and boot_img should be File objects for the
-  corresponding images.  info should be the dictionary returned by
-  common.LoadInfoDict() on the input target_files.
-
-  Returns an Item for the shell script, which must be made
-  executable.
-  """
-
-  diff_program = ["imgdiff"]
-  path = os.path.join(input_tmp, "SYSTEM", "etc", "recovery-resource.dat")
-  if os.path.exists(path):
-    diff_program.append("-b")
-    diff_program.append(path)
-    bonus_args = "-b /system/etc/recovery-resource.dat"
+def AppendAssertions(script, info_dict, oem_dict = None):
+  oem_props = info_dict.get("oem_fingerprint_properties")
+  if oem_props is None or len(oem_props) == 0:
+    device = GetBuildProp("ro.product.device", info_dict)
+    script.AssertDevice(device)
   else:
-    bonus_args = ""
+    if oem_dict is None:
+      raise common.ExternalError("No OEM file provided to answer expected assertions")
+    for prop in oem_props.split():
+      if oem_dict.get(prop) is None:
+        raise common.ExternalError("The OEM file is missing the property %s" % prop)
+      script.AssertOemProperty(prop, oem_dict.get(prop))
 
-  d = common.Difference(recovery_img, boot_img, diff_program=diff_program)
-  _, _, patch = d.ComputePatch()
-  common.ZipWriteStr(output_zip, "recovery/recovery-from-boot.p", patch)
-  Item.Get("system/recovery-from-boot.p", dir=False)
 
-  boot_type, boot_device = common.GetTypeAndDevice("/boot", OPTIONS.info_dict)
-  recovery_type, recovery_device = common.GetTypeAndDevice("/recovery", OPTIONS.info_dict)
+def HasRecoveryPatch(target_files_zip):
+  try:
+    target_files_zip.getinfo("SYSTEM/recovery-from-boot.p")
+    return True
+  except KeyError:
+    return False
 
-  sh = """#!/system/bin/sh
-if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
-  log -t recovery "Installing new recovery image"
-  applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p
-else
-  log -t recovery "Recovery image already installed"
-fi
-""" % { 'boot_size': boot_img.size,
-        'boot_sha1': boot_img.sha1,
-        'recovery_size': recovery_img.size,
-        'recovery_sha1': recovery_img.sha1,
-        'boot_type': boot_type,
-        'boot_device': boot_device,
-        'recovery_type': recovery_type,
-        'recovery_device': recovery_device,
-        'bonus_args': bonus_args,
-        }
-  common.ZipWriteStr(output_zip, "recovery/etc/install-recovery.sh", sh)
-  return Item.Get("system/etc/install-recovery.sh", dir=False)
+def HasVendorPartition(target_files_zip):
+  try:
+    target_files_zip.getinfo("VENDOR/")
+    return True
+  except KeyError:
+    return False
+
+def GetOemProperty(name, oem_props, oem_dict, info_dict):
+  if oem_props is not None and name in oem_props:
+    return oem_dict[name]
+  return GetBuildProp(name, info_dict)
+
+
+def CalculateFingerprint(oem_props, oem_dict, info_dict):
+  if oem_props is None:
+    return GetBuildProp("ro.build.fingerprint", info_dict)
+  return "%s/%s/%s:%s" % (
+    GetOemProperty("ro.product.brand", oem_props, oem_dict, info_dict),
+    GetOemProperty("ro.product.name", oem_props, oem_dict, info_dict),
+    GetOemProperty("ro.product.device", oem_props, oem_dict, info_dict),
+    GetBuildProp("ro.build.thumbprint", info_dict))
+
+
+def GetImage(which, tmpdir, info_dict):
+  # Return an image object (suitable for passing to BlockImageDiff)
+  # for the 'which' partition (must be "system" or "vendor").  If a
+  # prebuilt image and file map are found in tmpdir they are used,
+  # otherwise they are reconstructed from the individual files.
+
+  assert which in ("system", "vendor")
+
+  path = os.path.join(tmpdir, "IMAGES", which + ".img")
+  mappath = os.path.join(tmpdir, "IMAGES", which + ".map")
+  if os.path.exists(path) and os.path.exists(mappath):
+    print "using %s.img from target-files" % (which,)
+    # This is a 'new' target-files, which already has the image in it.
+
+  else:
+    print "building %s.img from target-files" % (which,)
+
+    # This is an 'old' target-files, which does not contain images
+    # already built.  Build them.
+
+    mappath = tempfile.mkstemp()[1]
+    OPTIONS.tempfiles.append(mappath)
+
+    import add_img_to_target_files
+    if which == "system":
+      path = add_img_to_target_files.BuildSystem(
+          tmpdir, info_dict, block_list=mappath)
+    elif which == "vendor":
+      path = add_img_to_target_files.BuildVendor(
+          tmpdir, info_dict, block_list=mappath)
+
+  return sparse_img.SparseImage(path, mappath)
 
 
 def WriteFullOTAPackage(input_zip, output_zip):
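
A quick worked example of the OEM fingerprint handling introduced above (all property values are made up, and the helpers defined above are assumed to be in scope): when oem_fingerprint_properties is set, CalculateFingerprint composes brand/name/device from the OEM dictionary plus the build thumbprint instead of using ro.build.fingerprint.

    oem_props = "ro.product.brand ro.product.name ro.product.device"  # hypothetical setting
    oem_dict = {"ro.product.brand": "Acme",
                "ro.product.name": "acme_phone",
                "ro.product.device": "acmedev"}                       # from the --oem_settings file
    info_dict = {"build.prop": {"ro.build.thumbprint": "KTU84P/1234:user/release-keys"}}

    print CalculateFingerprint(oem_props, oem_dict, info_dict)
    # -> Acme/acme_phone/acmedev:KTU84P/1234:user/release-keys
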
@@ -416,9 +466,18 @@
   # change very often.
   script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
 
-  metadata = {"post-build": GetBuildProp("ro.build.fingerprint",
-                                         OPTIONS.info_dict),
-              "pre-device": GetBuildProp("ro.product.device",
+  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
+  recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
+  oem_dict = None
+  if oem_props is not None and len(oem_props) > 0:
+    if OPTIONS.oem_source is None:
+      raise common.ExternalError("OEM source required for this build")
+    script.Mount("/oem", recovery_mount_options)
+    oem_dict = common.LoadDictionaryFromLines(open(OPTIONS.oem_source).readlines())
+
+  metadata = {"post-build": CalculateFingerprint(
+                               oem_props, oem_dict, OPTIONS.info_dict),
+              "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
                                          OPTIONS.info_dict),
               "post-timestamp": GetBuildProp("ro.build.date.utc",
                                              OPTIONS.info_dict),
@@ -433,12 +492,15 @@
       metadata=metadata,
       info_dict=OPTIONS.info_dict)
 
+  has_recovery_patch = HasRecoveryPatch(input_zip)
+  block_based = OPTIONS.block_based and has_recovery_patch
+
   if not OPTIONS.omit_prereq:
     ts = GetBuildProp("ro.build.date.utc", OPTIONS.info_dict)
     ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict)
     script.AssertOlderBuild(ts, ts_text)
 
-  AppendAssertions(script, OPTIONS.info_dict)
+  AppendAssertions(script, OPTIONS.info_dict, oem_dict)
   device_specific.FullOTA_Assertions()
 
   # Two-step package strategy (in chronological order, which is *not*
@@ -471,48 +533,91 @@
     bcb_dev = {"bcb_dev": fs.device}
     common.ZipWriteStr(output_zip, "recovery.img", recovery_img.data)
     script.AppendExtra("""
-if get_stage("%(bcb_dev)s", "stage") == "2/3" then
+if get_stage("%(bcb_dev)s") == "2/3" then
 """ % bcb_dev)
     script.WriteRawImage("/recovery", "recovery.img")
     script.AppendExtra("""
 set_stage("%(bcb_dev)s", "3/3");
 reboot_now("%(bcb_dev)s", "recovery");
-else if get_stage("%(bcb_dev)s", "stage") == "3/3" then
+else if get_stage("%(bcb_dev)s") == "3/3" then
 """ % bcb_dev)
 
   device_specific.FullOTA_InstallBegin()
 
-  script.ShowProgress(0.5, 0)
+  system_progress = 0.75
 
   if OPTIONS.wipe_user_data:
-    script.FormatPartition("/data")
+    system_progress -= 0.1
+  if HasVendorPartition(input_zip):
+    system_progress -= 0.1
 
   if "selinux_fc" in OPTIONS.info_dict:
     WritePolicyConfig(OPTIONS.info_dict["selinux_fc"], output_zip)
 
-  script.FormatPartition("/system")
-  script.Mount("/system")
-  script.UnpackPackageDir("recovery", "/system")
-  script.UnpackPackageDir("system", "/system")
+  recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
 
-  symlinks = CopySystemFiles(input_zip, output_zip)
-  script.MakeSymlinks(symlinks)
+  system_items = ItemSet("system", "META/filesystem_config.txt")
+  script.ShowProgress(system_progress, 0)
+  if block_based:
+    # Full OTA is done as an "incremental" against an empty source
+    # image.  This has the effect of writing new data from the package
+    # to the entire partition, but lets us reuse the updater code that
+    # writes incrementals to do it.
+    system_tgt = GetImage("system", OPTIONS.input_tmp, OPTIONS.info_dict)
+    system_tgt.ResetFileMap()
+    system_diff = common.BlockDifference("system", system_tgt, src=None)
+    system_diff.WriteScript(script, output_zip)
+  else:
+    script.FormatPartition("/system")
+    script.Mount("/system", recovery_mount_options)
+    if not has_recovery_patch:
+      script.UnpackPackageDir("recovery", "/system")
+    script.UnpackPackageDir("system", "/system")
+
+    symlinks = CopyPartitionFiles(system_items, input_zip, output_zip)
+    script.MakeSymlinks(symlinks)
 
   boot_img = common.GetBootableImage("boot.img", "boot.img",
                                      OPTIONS.input_tmp, "BOOT")
-  MakeRecoveryPatch(OPTIONS.input_tmp, output_zip, recovery_img, boot_img)
 
-  Item.GetMetadata(input_zip)
-  Item.Get("system").SetPermissions(script)
+  if not block_based:
+    def output_sink(fn, data):
+      common.ZipWriteStr(output_zip, "recovery/" + fn, data)
+      system_items.Get("system/" + fn, dir=False)
+
+    common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink,
+                             recovery_img, boot_img)
+
+    system_items.GetMetadata(input_zip)
+    system_items.Get("system").SetPermissions(script)
+
+  if HasVendorPartition(input_zip):
+    vendor_items = ItemSet("vendor", "META/vendor_filesystem_config.txt")
+    script.ShowProgress(0.1, 0)
+
+    if block_based:
+      vendor_tgt = GetImage("vendor", OPTIONS.input_tmp, OPTIONS.info_dict)
+      vendor_tgt.ResetFileMap()
+      vendor_diff = common.BlockDifference("vendor", vendor_tgt)
+      vendor_diff.WriteScript(script, output_zip)
+    else:
+      script.FormatPartition("/vendor")
+      script.Mount("/vendor", recovery_mount_options)
+      script.UnpackPackageDir("vendor", "/vendor")
+
+      symlinks = CopyPartitionFiles(vendor_items, input_zip, output_zip)
+      script.MakeSymlinks(symlinks)
+
+      vendor_items.GetMetadata(input_zip)
+      vendor_items.Get("vendor").SetPermissions(script)
 
   common.CheckSize(boot_img.data, "boot.img", OPTIONS.info_dict)
   common.ZipWriteStr(output_zip, "boot.img", boot_img.data)
-  script.ShowProgress(0.2, 0)
 
-  script.ShowProgress(0.2, 10)
+  script.ShowProgress(0.05, 5)
   script.WriteRawImage("/boot", "boot.img")
 
-  script.ShowProgress(0.1, 0)
+  script.ShowProgress(0.2, 10)
   device_specific.FullOTA_InstallEnd()
 
   if OPTIONS.extra_script is not None:
@@ -520,6 +625,10 @@
 
   script.UnmountAll()
 
+  if OPTIONS.wipe_user_data:
+    script.ShowProgress(0.1, 10)
+    script.FormatPartition("/data")
+
   if OPTIONS.two_step:
     script.AppendExtra("""
 set_stage("%(bcb_dev)s", "");
@@ -532,9 +641,10 @@
 endif;
 endif;
 """ % bcb_dev)
-  script.AddToZip(input_zip, output_zip)
+  script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
   WriteMetadata(metadata, output_zip)
 
+
 def WritePolicyConfig(file_context, output_zip):
   f = open(file_context, 'r');
   basename = os.path.basename(file_context)
@@ -546,14 +656,16 @@
                      "".join(["%s=%s\n" % kv
                               for kv in sorted(metadata.iteritems())]))
 
-def LoadSystemFiles(z):
-  """Load all the files from SYSTEM/... in a given target-files
+
+def LoadPartitionFiles(z, partition):
+  """Load all the files from the given partition in a given target-files
   ZipFile, and return a dict of {filename: File object}."""
   out = {}
+  prefix = partition.upper() + "/"
   for info in z.infolist():
-    if info.filename.startswith("SYSTEM/") and not IsSymlink(info):
+    if info.filename.startswith(prefix) and not IsSymlink(info):
       basefilename = info.filename[7:]
-      fn = "system/" + basefilename
+      fn = partition + "/" + basefilename
       data = z.read(info.filename)
       out[fn] = common.File(fn, data)
   return out
@@ -564,7 +676,8 @@
   try:
     return info_dict.get("build.prop", {})[prop]
   except KeyError:
-    raise common.ExternalError("couldn't find %s in build.prop" % (property,))
+    raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
+
 
 def AddToKnownPaths(filename, known_paths):
   if filename[-1] == "/":
@@ -577,7 +690,8 @@
     known_paths.add(path)
     dirs.pop()
 
-def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
+
+def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
   source_version = OPTIONS.source_info_dict["recovery_api_version"]
   target_version = OPTIONS.target_info_dict["recovery_api_version"]
 
@@ -603,82 +717,11 @@
       metadata=metadata,
       info_dict=OPTIONS.info_dict)
 
-  print "Loading target..."
-  target_data = LoadSystemFiles(target_zip)
-  print "Loading source..."
-  source_data = LoadSystemFiles(source_zip)
-
-  verbatim_targets = []
-  patch_list = []
-  diffs = []
-  renames = {}
-  known_paths = set()
-  largest_source_size = 0
-
-  matching_file_cache = {}
-  for fn, sf in source_data.items():
-    assert fn == sf.name
-    matching_file_cache["path:" + fn] = sf
-    if fn in target_data.keys():
-      AddToKnownPaths(fn, known_paths)
-    # Only allow eligibility for filename/sha matching
-    # if there isn't a perfect path match.
-    if target_data.get(sf.name) is None:
-      matching_file_cache["file:" + fn.split("/")[-1]] = sf
-      matching_file_cache["sha:" + sf.sha1] = sf
-
-  for fn in sorted(target_data.keys()):
-    tf = target_data[fn]
-    assert fn == tf.name
-    sf = ClosestFileMatch(tf, matching_file_cache, renames)
-    if sf is not None and sf.name != tf.name:
-      print "File has moved from " + sf.name + " to " + tf.name
-      renames[sf.name] = tf
-
-    if sf is None or fn in OPTIONS.require_verbatim:
-      # This file should be included verbatim
-      if fn in OPTIONS.prohibit_verbatim:
-        raise common.ExternalError("\"%s\" must be sent verbatim" % (fn,))
-      print "send", fn, "verbatim"
-      tf.AddToZip(output_zip)
-      verbatim_targets.append((fn, tf.size))
-      if fn in target_data.keys():
-        AddToKnownPaths(fn, known_paths)
-    elif tf.sha1 != sf.sha1:
-      # File is different; consider sending as a patch
-      diffs.append(common.Difference(tf, sf))
-    else:
-      # Target file data identical to source (may still be renamed)
-      pass
-
-  common.ComputeDifferences(diffs)
-
-  for diff in diffs:
-    tf, sf, d = diff.GetPatch()
-    path = "/".join(tf.name.split("/")[:-1])
-    if d is None or len(d) > tf.size * OPTIONS.patch_threshold or \
-        path not in known_paths:
-      # patch is almost as big as the file; don't bother patching
-      # or a patch + rename cannot take place due to the target 
-      # directory not existing
-      tf.AddToZip(output_zip)
-      verbatim_targets.append((tf.name, tf.size))
-      if sf.name in renames:
-        del renames[sf.name]
-      AddToKnownPaths(tf.name, known_paths)
-    else:
-      common.ZipWriteStr(output_zip, "patch/" + sf.name + ".p", d)
-      patch_list.append((tf, sf, tf.size, common.sha1(d).hexdigest()))
-      largest_source_size = max(largest_source_size, sf.size)
-
   source_fp = GetBuildProp("ro.build.fingerprint", OPTIONS.source_info_dict)
   target_fp = GetBuildProp("ro.build.fingerprint", OPTIONS.target_info_dict)
   metadata["pre-build"] = source_fp
   metadata["post-build"] = target_fp
 
-  script.Mount("/system")
-  script.AssertSomeFingerprint(source_fp, target_fp)
-
   source_boot = common.GetBootableImage(
       "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT",
       OPTIONS.source_info_dict)
@@ -694,13 +737,31 @@
       "/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY")
   updating_recovery = (source_recovery.data != target_recovery.data)
 
-  # Here's how we divide up the progress bar:
-  #  0.1 for verifying the start state (PatchCheck calls)
-  #  0.8 for applying patches (ApplyPatch calls)
-  #  0.1 for unpacking verbatim files, symlinking, and doing the
-  #      device-specific commands.
+  system_src = GetImage("system", OPTIONS.source_tmp, OPTIONS.source_info_dict)
+  system_tgt = GetImage("system", OPTIONS.target_tmp, OPTIONS.target_info_dict)
+  system_diff = common.BlockDifference("system", system_tgt, system_src,
+                                       check_first_block=True)
 
-  AppendAssertions(script, OPTIONS.target_info_dict)
+  if HasVendorPartition(target_zip):
+    if not HasVendorPartition(source_zip):
+      raise RuntimeError("can't generate incremental that adds /vendor")
+    vendor_src = GetImage("vendor", OPTIONS.source_tmp, OPTIONS.source_info_dict)
+    vendor_tgt = GetImage("vendor", OPTIONS.target_tmp, OPTIONS.target_info_dict)
+    vendor_diff = common.BlockDifference("vendor", vendor_tgt, vendor_src,
+                                         check_first_block=True)
+  else:
+    vendor_diff = None
+
+  oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
+  recovery_mount_options = OPTIONS.target_info_dict.get("recovery_mount_options")
+  oem_dict = None
+  if oem_props is not None and len(oem_props) > 0:
+    if OPTIONS.oem_source is None:
+      raise common.ExternalError("OEM source required for this build")
+    script.Mount("/oem", recovery_mount_options)
+    oem_dict = common.LoadDictionaryFromLines(open(OPTIONS.oem_source).readlines())
+
+  AppendAssertions(script, OPTIONS.target_info_dict, oem_dict)
   device_specific.IncrementalOTA_Assertions()
 
   # Two-step incremental package strategy (in chronological order,
@@ -733,14 +794,370 @@
     bcb_dev = {"bcb_dev": fs.device}
     common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
     script.AppendExtra("""
-if get_stage("%(bcb_dev)s", "stage") == "2/3" then
+if get_stage("%(bcb_dev)s") == "2/3" then
 """ % bcb_dev)
     script.AppendExtra("sleep(20);\n");
     script.WriteRawImage("/recovery", "recovery.img")
     script.AppendExtra("""
 set_stage("%(bcb_dev)s", "3/3");
 reboot_now("%(bcb_dev)s", "recovery");
-else if get_stage("%(bcb_dev)s", "stage") != "3/3" then
+else if get_stage("%(bcb_dev)s") != "3/3" then
+""" % bcb_dev)
+
+  script.Print("Verifying current system...")
+
+  device_specific.IncrementalOTA_VerifyBegin()
+
+  if oem_props is None:
+    script.AssertSomeFingerprint(source_fp, target_fp)
+  else:
+    script.AssertSomeThumbprint(
+        GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
+        GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
+
+  if updating_boot:
+    boot_type, boot_device = common.GetTypeAndDevice("/boot", OPTIONS.info_dict)
+    d = common.Difference(target_boot, source_boot)
+    _, _, d = d.ComputePatch()
+    if d is None:
+      include_full_boot = True
+      common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
+    else:
+      include_full_boot = False
+
+      print "boot      target: %d  source: %d  diff: %d" % (
+          target_boot.size, source_boot.size, len(d))
+
+      common.ZipWriteStr(output_zip, "patch/boot.img.p", d)
+
+      script.PatchCheck("%s:%s:%d:%s:%d:%s" %
+                        (boot_type, boot_device,
+                         source_boot.size, source_boot.sha1,
+                         target_boot.size, target_boot.sha1))
+
+  device_specific.IncrementalOTA_VerifyEnd()
+
+  if OPTIONS.two_step:
+    script.WriteRawImage("/boot", "recovery.img")
+    script.AppendExtra("""
+set_stage("%(bcb_dev)s", "2/3");
+reboot_now("%(bcb_dev)s", "");
+else
+""" % bcb_dev)
+
+  script.Comment("---- start making changes here ----")
+
+  device_specific.IncrementalOTA_InstallBegin()
+
+  system_diff.WriteScript(script, output_zip,
+                          progress=0.8 if vendor_diff else 0.9)
+  if vendor_diff:
+    vendor_diff.WriteScript(script, output_zip, progress=0.1)
+
+  if OPTIONS.two_step:
+    common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
+    script.WriteRawImage("/boot", "boot.img")
+    print "writing full boot image (forced by two-step mode)"
+
+  if not OPTIONS.two_step:
+    if updating_boot:
+      if include_full_boot:
+        print "boot image changed; including full."
+        script.Print("Installing boot image...")
+        script.WriteRawImage("/boot", "boot.img")
+      else:
+        # Produce the boot image by applying a patch to the current
+        # contents of the boot partition, and write it back to the
+        # partition.
+        print "boot image changed; including patch."
+        script.Print("Patching boot image...")
+        script.ShowProgress(0.1, 10)
+        script.ApplyPatch("%s:%s:%d:%s:%d:%s"
+                          % (boot_type, boot_device,
+                             source_boot.size, source_boot.sha1,
+                             target_boot.size, target_boot.sha1),
+                          "-",
+                          target_boot.size, target_boot.sha1,
+                          source_boot.sha1, "patch/boot.img.p")
+    else:
+      print "boot image unchanged; skipping."
+
+  # Do device-specific installation (eg, write radio image).
+  device_specific.IncrementalOTA_InstallEnd()
+
+  if OPTIONS.extra_script is not None:
+    script.AppendExtra(OPTIONS.extra_script)
+
+  if OPTIONS.wipe_user_data:
+    script.Print("Erasing user data...")
+    script.FormatPartition("/data")
+
+  if OPTIONS.two_step:
+    script.AppendExtra("""
+set_stage("%(bcb_dev)s", "");
+endif;
+endif;
+""" % bcb_dev)
+
+  script.SetProgress(1)
+  script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
+  WriteMetadata(metadata, output_zip)
+
+
+class FileDifference:
+  def __init__(self, partition, source_zip, target_zip, output_zip):
+    print "Loading target..."
+    self.target_data = target_data = LoadPartitionFiles(target_zip, partition)
+    print "Loading source..."
+    self.source_data = source_data = LoadPartitionFiles(source_zip, partition)
+
+    self.verbatim_targets = verbatim_targets = []
+    self.patch_list = patch_list = []
+    diffs = []
+    self.renames = renames = {}
+    known_paths = set()
+    largest_source_size = 0
+
+    matching_file_cache = {}
+    for fn, sf in source_data.items():
+      assert fn == sf.name
+      matching_file_cache["path:" + fn] = sf
+      if fn in target_data.keys():
+        AddToKnownPaths(fn, known_paths)
+      # Only allow eligibility for filename/sha matching
+      # if there isn't a perfect path match.
+      if target_data.get(sf.name) is None:
+        matching_file_cache["file:" + fn.split("/")[-1]] = sf
+        matching_file_cache["sha:" + sf.sha1] = sf
+
+    for fn in sorted(target_data.keys()):
+      tf = target_data[fn]
+      assert fn == tf.name
+      sf = ClosestFileMatch(tf, matching_file_cache, renames)
+      if sf is not None and sf.name != tf.name:
+        print "File has moved from " + sf.name + " to " + tf.name
+        renames[sf.name] = tf
+
+      if sf is None or fn in OPTIONS.require_verbatim:
+        # This file should be included verbatim
+        if fn in OPTIONS.prohibit_verbatim:
+          raise common.ExternalError("\"%s\" must be sent verbatim" % (fn,))
+        print "send", fn, "verbatim"
+        tf.AddToZip(output_zip)
+        verbatim_targets.append((fn, tf.size, tf.sha1))
+        if fn in target_data.keys():
+          AddToKnownPaths(fn, known_paths)
+      elif tf.sha1 != sf.sha1:
+        # File is different; consider sending as a patch
+        diffs.append(common.Difference(tf, sf))
+      else:
+        # Target file data identical to source (may still be renamed)
+        pass
+
+    common.ComputeDifferences(diffs)
+
+    for diff in diffs:
+      tf, sf, d = diff.GetPatch()
+      path = "/".join(tf.name.split("/")[:-1])
+      if d is None or len(d) > tf.size * OPTIONS.patch_threshold or \
+          path not in known_paths:
+        # patch is almost as big as the file; don't bother patching
+        # or a patch + rename cannot take place due to the target
+        # directory not existing
+        tf.AddToZip(output_zip)
+        verbatim_targets.append((tf.name, tf.size, tf.sha1))
+        if sf.name in renames:
+          del renames[sf.name]
+        AddToKnownPaths(tf.name, known_paths)
+      else:
+        common.ZipWriteStr(output_zip, "patch/" + sf.name + ".p", d)
+        patch_list.append((tf, sf, tf.size, common.sha1(d).hexdigest()))
+        largest_source_size = max(largest_source_size, sf.size)
+
+    self.largest_source_size = largest_source_size
+
+  def EmitVerification(self, script):
+    so_far = 0
+    for tf, sf, size, patch_sha in self.patch_list:
+      if tf.name != sf.name:
+        script.SkipNextActionIfTargetExists(tf.name, tf.sha1)
+      script.PatchCheck("/"+sf.name, tf.sha1, sf.sha1)
+      so_far += sf.size
+    return so_far
+
+  def EmitExplicitTargetVerification(self, script):
+    for fn, size, sha1 in self.verbatim_targets:
+      if (fn[-1] != "/"):
+        script.FileCheck("/"+fn, sha1)
+    for tf, _, _, _ in self.patch_list:
+      script.FileCheck(tf.name, tf.sha1)
+
+  def RemoveUnneededFiles(self, script, extras=()):
+    script.DeleteFiles(["/"+i[0] for i in self.verbatim_targets] +
+                       ["/"+i for i in sorted(self.source_data)
+                              if i not in self.target_data and
+                              i not in self.renames] +
+                       list(extras))
+
+  def TotalPatchSize(self):
+    return sum(i[1].size for i in self.patch_list)
+
+  def EmitPatches(self, script, total_patch_size, so_far):
+    self.deferred_patch_list = deferred_patch_list = []
+    for item in self.patch_list:
+      tf, sf, size, _ = item
+      if tf.name == "system/build.prop":
+        deferred_patch_list.append(item)
+        continue
+      if (sf.name != tf.name):
+        script.SkipNextActionIfTargetExists(tf.name, tf.sha1)
+      script.ApplyPatch("/"+sf.name, "-", tf.size, tf.sha1, sf.sha1, "patch/"+sf.name+".p")
+      so_far += tf.size
+      script.SetProgress(so_far / total_patch_size)
+    return so_far
+
+  def EmitDeferredPatches(self, script):
+    for item in self.deferred_patch_list:
+      tf, sf, size, _ = item
+      script.ApplyPatch("/"+sf.name, "-", tf.size, tf.sha1, sf.sha1, "patch/"+sf.name+".p")
+    script.SetPermissions("/system/build.prop", 0, 0, 0644, None, None)
+
+  def EmitRenames(self, script):
+    if len(self.renames) > 0:
+      script.Print("Renaming files...")
+      for src, tgt in self.renames.iteritems():
+        print "Renaming " + src + " to " + tgt.name
+        script.RenameFile(src, tgt.name)
+
+
+
+
+def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
+  target_has_recovery_patch = HasRecoveryPatch(target_zip)
+  source_has_recovery_patch = HasRecoveryPatch(source_zip)
+
+  if (OPTIONS.block_based and
+      target_has_recovery_patch and
+      source_has_recovery_patch):
+    return WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip)
+
+  source_version = OPTIONS.source_info_dict["recovery_api_version"]
+  target_version = OPTIONS.target_info_dict["recovery_api_version"]
+
+  if source_version == 0:
+    print ("WARNING: generating edify script for a source that "
+           "can't install it.")
+  script = edify_generator.EdifyGenerator(source_version,
+                                          OPTIONS.target_info_dict)
+
+  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
+  recovery_mount_options = OPTIONS.info_dict.get("recovery_mount_options")
+  oem_dict = None
+  if oem_props is not None and len(oem_props) > 0:
+    if OPTIONS.oem_source is None:
+      raise common.ExternalError("OEM source required for this build")
+    script.Mount("/oem", recovery_mount_options)
+    oem_dict = common.LoadDictionaryFromLines(open(OPTIONS.oem_source).readlines())
+
+  metadata = {"pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+                                         OPTIONS.source_info_dict),
+              "post-timestamp": GetBuildProp("ro.build.date.utc",
+                                             OPTIONS.target_info_dict),
+              }
+
+  device_specific = common.DeviceSpecificParams(
+      source_zip=source_zip,
+      source_version=source_version,
+      target_zip=target_zip,
+      target_version=target_version,
+      output_zip=output_zip,
+      script=script,
+      metadata=metadata,
+      info_dict=OPTIONS.info_dict)
+
+  system_diff = FileDifference("system", source_zip, target_zip, output_zip)
+  script.Mount("/system", recovery_mount_options)
+  if HasVendorPartition(target_zip):
+    vendor_diff = FileDifference("vendor", source_zip, target_zip, output_zip)
+    script.Mount("/vendor", recovery_mount_options)
+  else:
+    vendor_diff = None
+
+  target_fp = CalculateFingerprint(oem_props, oem_dict, OPTIONS.target_info_dict)
+  source_fp = CalculateFingerprint(oem_props, oem_dict, OPTIONS.source_info_dict)
+
+  if oem_props is None:
+    script.AssertSomeFingerprint(source_fp, target_fp)
+  else:
+    script.AssertSomeThumbprint(
+        GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
+        GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
+
+  metadata["pre-build"] = source_fp
+  metadata["post-build"] = target_fp
+
+  source_boot = common.GetBootableImage(
+      "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT",
+      OPTIONS.source_info_dict)
+  target_boot = common.GetBootableImage(
+      "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT")
+  updating_boot = (not OPTIONS.two_step and
+                   (source_boot.data != target_boot.data))
+
+  source_recovery = common.GetBootableImage(
+      "/tmp/recovery.img", "recovery.img", OPTIONS.source_tmp, "RECOVERY",
+      OPTIONS.source_info_dict)
+  target_recovery = common.GetBootableImage(
+      "/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY")
+  updating_recovery = (source_recovery.data != target_recovery.data)
+
+  # Here's how we divide up the progress bar:
+  #  0.1 for verifying the start state (PatchCheck calls)
+  #  0.8 for applying patches (ApplyPatch calls)
+  #  0.1 for unpacking verbatim files, symlinking, and doing the
+  #      device-specific commands.
+
+  AppendAssertions(script, OPTIONS.target_info_dict, oem_dict)
+  device_specific.IncrementalOTA_Assertions()
+
+  # Two-step incremental package strategy (in chronological order,
+  # which is *not* the order in which the generated script has
+  # things):
+  #
+  # if stage is not "2/3" or "3/3":
+  #    do verification on current system
+  #    write recovery image to boot partition
+  #    set stage to "2/3"
+  #    reboot to boot partition and restart recovery
+  # else if stage is "2/3":
+  #    write recovery image to recovery partition
+  #    set stage to "3/3"
+  #    reboot to recovery partition and restart recovery
+  # else:
+  #    (stage must be "3/3")
+  #    perform update:
+  #       patch system files, etc.
+  #       force full install of new boot image
+  #       set up system to update recovery partition on first boot
+  #    complete script normally (allow recovery to mark itself finished and reboot)
+
+  if OPTIONS.two_step:
+    if not OPTIONS.info_dict.get("multistage_support", None):
+      assert False, "two-step packages not supported by this build"
+    fs = OPTIONS.info_dict["fstab"]["/misc"]
+    assert fs.fs_type.upper() == "EMMC", \
+        "two-step packages only supported on devices with EMMC /misc partitions"
+    bcb_dev = {"bcb_dev": fs.device}
+    common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
+    script.AppendExtra("""
+if get_stage("%(bcb_dev)s") == "2/3" then
+""" % bcb_dev)
+    script.AppendExtra("sleep(20);\n");
+    script.WriteRawImage("/recovery", "recovery.img")
+    script.AppendExtra("""
+set_stage("%(bcb_dev)s", "3/3");
+reboot_now("%(bcb_dev)s", "recovery");
+else if get_stage("%(bcb_dev)s") != "3/3" then
 """ % bcb_dev)
 
   script.Print("Verifying current system...")
@@ -748,17 +1165,9 @@
   device_specific.IncrementalOTA_VerifyBegin()
 
   script.ShowProgress(0.1, 0)
-  total_verify_size = float(sum([i[1].size for i in patch_list]) + 1)
-  if updating_boot:
-    total_verify_size += source_boot.size
-  so_far = 0
-
-  for tf, sf, size, patch_sha in patch_list:
-    if tf.name != sf.name:
-      script.SkipNextActionIfTargetExists(tf.name, tf.sha1)
-    script.PatchCheck("/"+sf.name, tf.sha1, sf.sha1)
-    so_far += sf.size
-    script.SetProgress(so_far / total_verify_size)
+  so_far = system_diff.EmitVerification(script)
+  if vendor_diff:
+    so_far += vendor_diff.EmitVerification(script)
 
   if updating_boot:
     d = common.Difference(target_boot, source_boot)
@@ -775,10 +1184,13 @@
                        source_boot.size, source_boot.sha1,
                        target_boot.size, target_boot.sha1))
     so_far += source_boot.size
-    script.SetProgress(so_far / total_verify_size)
 
-  if patch_list or updating_recovery or updating_boot:
-    script.CacheFreeSpaceCheck(largest_source_size)
+  size = []
+  if system_diff.patch_list: size.append(system_diff.largest_source_size)
+  if vendor_diff:
+    if vendor_diff.patch_list: size.append(vendor_diff.largest_source_size)
+  if size or updating_recovery or updating_boot:
+    script.CacheFreeSpaceCheck(max(size))
 
   device_specific.IncrementalOTA_VerifyEnd()
 
@@ -799,35 +1211,23 @@
     script.WriteRawImage("/boot", "boot.img")
     print "writing full boot image (forced by two-step mode)"
 
-  if OPTIONS.wipe_user_data:
-    script.Print("Erasing user data...")
-    script.FormatPartition("/data")
-
   script.Print("Removing unneeded files...")
-  script.DeleteFiles(["/"+i[0] for i in verbatim_targets] +
-                     ["/"+i for i in sorted(source_data)
-                            if i not in target_data and
-                            i not in renames] +
-                     ["/system/recovery.img"])
+  system_diff.RemoveUnneededFiles(script, ("/system/recovery.img",))
+  if vendor_diff:
+    vendor_diff.RemoveUnneededFiles(script)
 
   script.ShowProgress(0.8, 0)
-  total_patch_size = float(sum([i[1].size for i in patch_list]) + 1)
+  total_patch_size = 1.0 + system_diff.TotalPatchSize()
+  if vendor_diff:
+    total_patch_size += vendor_diff.TotalPatchSize()
   if updating_boot:
     total_patch_size += target_boot.size
-  so_far = 0
 
   script.Print("Patching system files...")
-  deferred_patch_list = []
-  for item in patch_list:
-    tf, sf, size, _ = item
-    if tf.name == "system/build.prop":
-      deferred_patch_list.append(item)
-      continue
-    if (sf.name != tf.name):
-      script.SkipNextActionIfTargetExists(tf.name, tf.sha1)
-    script.ApplyPatch("/"+sf.name, "-", tf.size, tf.sha1, sf.sha1, "patch/"+sf.name+".p")
-    so_far += tf.size
-    script.SetProgress(so_far / total_patch_size)
+  so_far = system_diff.EmitPatches(script, total_patch_size, 0)
+  if vendor_diff:
+    script.Print("Patching vendor files...")
+    so_far = vendor_diff.EmitPatches(script, total_patch_size, so_far)
 
   if not OPTIONS.two_step:
     if updating_boot:
@@ -848,6 +1248,10 @@
     else:
       print "boot image unchanged; skipping."
 
+  system_items = ItemSet("system", "META/filesystem_config.txt")
+  if vendor_diff:
+    vendor_items = ItemSet("vendor", "META/vendor_filesystem_config.txt")
+
   if updating_recovery:
     # Recovery is generated as a patch using both the boot image
     # (which contains the same linux kernel as recovery) and the file
@@ -858,26 +1262,39 @@
     # For older builds where recovery-resource.dat is not present, we
     # use only the boot image as the source.
 
-    MakeRecoveryPatch(OPTIONS.target_tmp, output_zip,
-                      target_recovery, target_boot)
-    script.DeleteFiles(["/system/recovery-from-boot.p",
-                        "/system/etc/install-recovery.sh"])
+    if not target_has_recovery_patch:
+      def output_sink(fn, data):
+        common.ZipWriteStr(output_zip, "recovery/" + fn, data)
+        system_items.Get("system/" + fn, dir=False)
+
+      common.MakeRecoveryPatch(OPTIONS.target_tmp, output_sink,
+                               target_recovery, target_boot)
+      script.DeleteFiles(["/system/recovery-from-boot.p",
+                          "/system/etc/install-recovery.sh"])
     print "recovery image changed; including as patch from boot."
   else:
     print "recovery image unchanged; skipping."
 
   script.ShowProgress(0.1, 10)
 
-  target_symlinks = CopySystemFiles(target_zip, None)
+  target_symlinks = CopyPartitionFiles(system_items, target_zip, None)
+  if vendor_diff:
+    target_symlinks.extend(CopyPartitionFiles(vendor_items, target_zip, None))
+
+  temp_script = script.MakeTemporary()
+  system_items.GetMetadata(target_zip)
+  system_items.Get("system").SetPermissions(temp_script)
+  if vendor_diff:
+    vendor_items.GetMetadata(target_zip)
+    vendor_items.Get("vendor").SetPermissions(temp_script)
+
+  # Note that this call will mess up the trees of Items, so make sure
+  # we're done with them.
+  source_symlinks = CopyPartitionFiles(system_items, source_zip, None)
+  if vendor_diff:
+    source_symlinks.extend(CopyPartitionFiles(vendor_items, source_zip, None))
 
   target_symlinks_d = dict([(i[1], i[0]) for i in target_symlinks])
-  temp_script = script.MakeTemporary()
-  Item.GetMetadata(target_zip)
-  Item.Get("system").SetPermissions(temp_script)
-
-  # Note that this call will mess up the tree of Items, so make sure
-  # we're done with it.
-  source_symlinks = CopySystemFiles(source_zip, None)
   source_symlinks_d = dict([(i[1], i[0]) for i in source_symlinks])
 
   # Delete all the symlinks in source that aren't in target.  This
@@ -889,20 +1306,20 @@
       to_delete.append(link)
   script.DeleteFiles(to_delete)
 
-  if verbatim_targets:
-    script.Print("Unpacking new files...")
+  if system_diff.verbatim_targets:
+    script.Print("Unpacking new system files...")
     script.UnpackPackageDir("system", "/system")
+  if vendor_diff and vendor_diff.verbatim_targets:
+    script.Print("Unpacking new vendor files...")
+    script.UnpackPackageDir("vendor", "/vendor")
 
-  if updating_recovery:
+  if updating_recovery and not target_has_recovery_patch:
     script.Print("Unpacking new recovery...")
     script.UnpackPackageDir("recovery", "/system")
 
-  if len(renames) > 0:
-    script.Print("Renaming files...")
-
-  for src in renames:
-    print "Renaming " + src + " to " + renames[src].name
-    script.RenameFile(src, renames[src].name)
+  system_diff.EmitRenames(script)
+  if vendor_diff:
+    vendor_diff.EmitRenames(script)
 
   script.Print("Symlinks and permissions...")
 
@@ -933,10 +1350,11 @@
   # device can still come up, it appears to be the old build and will
   # get set the OTA package again to retry.
   script.Print("Patching remaining system files...")
-  for item in deferred_patch_list:
-    tf, sf, size, _ = item
-    script.ApplyPatch("/"+sf.name, "-", tf.size, tf.sha1, sf.sha1, "patch/"+sf.name+".p")
-  script.SetPermissions("/system/build.prop", 0, 0, 0644, None, None)
+  system_diff.EmitDeferredPatches(script)
+
+  if OPTIONS.wipe_user_data:
+    script.Print("Erasing user data...")
+    script.FormatPartition("/data")
 
   if OPTIONS.two_step:
     script.AppendExtra("""
@@ -945,14 +1363,26 @@
 endif;
 """ % bcb_dev)
 
-  script.AddToZip(target_zip, output_zip)
+  if OPTIONS.verify and system_diff:
+    script.Print("Remounting and verifying system partition files...")
+    script.Unmount("/system")
+    script.Mount("/system")
+    system_diff.EmitExplicitTargetVerification(script)
+
+  if OPTIONS.verify and vendor_diff:
+    script.Print("Remounting and verifying vendor partition files...")
+    script.Unmount("/vendor")
+    script.Mount("/vendor")
+    vendor_diff.EmitExplicitTargetVerification(script)
+  script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
+
   WriteMetadata(metadata, output_zip)
 
 
 def main(argv):
 
   def option_handler(o, a):
-    if o in ("-b", "--board_config"):
+    if o == "--board_config":
       pass   # deprecated
     elif o in ("-k", "--package_key"):
       OPTIONS.package_key = a
@@ -962,6 +1392,8 @@
       OPTIONS.wipe_user_data = True
     elif o in ("-n", "--no_prereq"):
       OPTIONS.omit_prereq = True
+    elif o in ("-o", "--oem_settings"):
+      OPTIONS.oem_source = a
     elif o in ("-e", "--extra_script"):
       OPTIONS.extra_script = a
     elif o in ("-a", "--aslr_mode"):
@@ -977,14 +1409,22 @@
                          "integers are allowed." % (a, o))
     elif o in ("-2", "--two_step"):
       OPTIONS.two_step = True
-    elif o in ("--no_signing"):
+    elif o == "--no_signing":
       OPTIONS.no_signing = True
+    elif o in ("--verify"):
+      OPTIONS.verify = True
+    elif o == "--block":
+      OPTIONS.block_based = True
+    elif o in ("-b", "--binary"):
+      OPTIONS.updater_binary = a
+    elif o in ("--no_fallback_to_full",):
+      OPTIONS.fallback_to_full = False
     else:
       return False
     return True
 
   args = common.ParseOptions(argv, __doc__,
-                             extra_opts="b:k:i:d:wne:t:a:2",
+                             extra_opts="b:k:i:d:wne:t:a:2o:",
                              extra_long_opts=["board_config=",
                                               "package_key=",
                                               "incremental_from=",
@@ -995,6 +1435,11 @@
                                               "aslr_mode=",
                                               "two_step",
                                               "no_signing",
+                                              "block",
+                                              "binary=",
+                                              "oem_settings=",
+                                              "verify",
+                                              "no_fallback_to_full",
                                               ],
                              extra_option_handler=option_handler)
 
@@ -1023,38 +1468,65 @@
     print "--- target info ---"
     common.DumpInfoDict(OPTIONS.info_dict)
 
+  # If the caller explicitly specified the device-specific extensions
+  # path via -s/--device_specific, use that.  Otherwise, use
+  # META/releasetools.py if it is present in the target target_files.
+  # Otherwise, take the path of the file from 'tool_extensions' in the
+  # info dict and look for that in the local filesystem, relative to
+  # the current directory.
+
   if OPTIONS.device_specific is None:
-    OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions", None)
+    from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
+    if os.path.exists(from_input):
+      print "(using device-specific extensions from target_files)"
+      OPTIONS.device_specific = from_input
+    else:
+      OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions", None)
+
   if OPTIONS.device_specific is not None:
-    OPTIONS.device_specific = os.path.normpath(OPTIONS.device_specific)
-    print "using device-specific extensions in", OPTIONS.device_specific
+    OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific)
 
-  if OPTIONS.no_signing:
-    output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
-  else:
-    temp_zip_file = tempfile.NamedTemporaryFile()
-    output_zip = zipfile.ZipFile(temp_zip_file, "w",
-                                 compression=zipfile.ZIP_DEFLATED)
+  while True:
 
-  if OPTIONS.incremental_source is None:
-    WriteFullOTAPackage(input_zip, output_zip)
-    if OPTIONS.package_key is None:
-      OPTIONS.package_key = OPTIONS.info_dict.get(
-          "default_system_dev_certificate",
-          "build/target/product/security/testkey")
-  else:
-    print "unzipping source target-files..."
-    OPTIONS.source_tmp, source_zip = common.UnzipTemp(OPTIONS.incremental_source)
-    OPTIONS.target_info_dict = OPTIONS.info_dict
-    OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
-    if OPTIONS.package_key is None:
-      OPTIONS.package_key = OPTIONS.source_info_dict.get(
-          "default_system_dev_certificate",
-          "build/target/product/security/testkey")
-    if OPTIONS.verbose:
-      print "--- source info ---"
-      common.DumpInfoDict(OPTIONS.source_info_dict)
-    WriteIncrementalOTAPackage(input_zip, source_zip, output_zip)
+    if OPTIONS.no_signing:
+      if os.path.exists(args[1]): os.unlink(args[1])
+      output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
+    else:
+      temp_zip_file = tempfile.NamedTemporaryFile()
+      output_zip = zipfile.ZipFile(temp_zip_file, "w",
+                                   compression=zipfile.ZIP_DEFLATED)
+
+    if OPTIONS.incremental_source is None:
+      WriteFullOTAPackage(input_zip, output_zip)
+      if OPTIONS.package_key is None:
+        OPTIONS.package_key = OPTIONS.info_dict.get(
+            "default_system_dev_certificate",
+            "build/target/product/security/testkey")
+      break
+
+    else:
+      print "unzipping source target-files..."
+      OPTIONS.source_tmp, source_zip = common.UnzipTemp(OPTIONS.incremental_source)
+      OPTIONS.target_info_dict = OPTIONS.info_dict
+      OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
+      if "selinux_fc" in OPTIONS.source_info_dict:
+        OPTIONS.source_info_dict["selinux_fc"] = os.path.join(OPTIONS.source_tmp, "BOOT", "RAMDISK",
+                                                              "file_contexts")
+      if OPTIONS.package_key is None:
+        OPTIONS.package_key = OPTIONS.source_info_dict.get(
+            "default_system_dev_certificate",
+            "build/target/product/security/testkey")
+      if OPTIONS.verbose:
+        print "--- source info ---"
+        common.DumpInfoDict(OPTIONS.source_info_dict)
+      try:
+        WriteIncrementalOTAPackage(input_zip, source_zip, output_zip)
+        break
+      except ValueError:
+        if not OPTIONS.fallback_to_full: raise
+        print "--- failed to build incremental; falling back to full ---"
+        OPTIONS.incremental_source = None
+        output_zip.close()
 
   output_zip.close()
 
@@ -1062,8 +1534,6 @@
     SignOutput(temp_zip_file.name, args[1])
     temp_zip_file.close()
 
-  common.Cleanup()
-
   print "done."
 
 
@@ -1076,3 +1546,5 @@
     print "   ERROR: %s" % (e,)
     print
     sys.exit(1)
+  finally:
+    common.Cleanup()
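
To summarize the file-based incremental refactor above: the logic that previously lived inline in WriteIncrementalOTAPackage is now driven per partition through a FileDifference object, with the vendor partition handled the same way when present. A condensed sketch of the calling order (the script, zip, and OPTIONS objects are assumed to be set up exactly as in the function above; this is not additional code in the change):

    system_diff = FileDifference("system", source_zip, target_zip, output_zip)

    so_far = system_diff.EmitVerification(script)                  # PatchCheck calls
    system_diff.RemoveUnneededFiles(script, ("/system/recovery.img",))

    total_patch_size = 1.0 + system_diff.TotalPatchSize()
    so_far = system_diff.EmitPatches(script, total_patch_size, 0)  # ApplyPatch calls
    system_diff.EmitRenames(script)
    system_diff.EmitDeferredPatches(script)                        # system/build.prop is patched last
    if OPTIONS.verify:
      system_diff.EmitExplicitTargetVerification(script)           # optional --verify re-check
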
diff --git a/tools/releasetools/rangelib.py b/tools/releasetools/rangelib.py
new file mode 100644
index 0000000..8a85d2d
--- /dev/null
+++ b/tools/releasetools/rangelib.py
@@ -0,0 +1,175 @@
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+import heapq
+import itertools
+
+__all__ = ["RangeSet"]
+
+class RangeSet(object):
+  """A RangeSet represents a set of nonoverlapping ranges on the
+  integers (i.e., a set of integers, but efficient when the set
+  contains lots of runs)."""
+
+  def __init__(self, data=None):
+    if data:
+      self.data = tuple(self._remove_pairs(data))
+    else:
+      self.data = ()
+
+  def __iter__(self):
+    for i in range(0, len(self.data), 2):
+      yield self.data[i:i+2]
+
+  def __eq__(self, other):
+    return self.data == other.data
+  def __ne__(self, other):
+    return self.data != other.data
+  def __nonzero__(self):
+    return bool(self.data)
+
+  def __str__(self):
+    if not self.data:
+      return "empty"
+    else:
+      return self.to_string()
+
+  @classmethod
+  def parse(cls, text):
+    """Parse a text string consisting of a space-separated list of
+    blocks and ranges, e.g. "10-20 30 35-40".  Ranges are interpreted to
+    include both their ends (so the above example represents 18
+    individual blocks).  Returns a RangeSet object.
+
+    If the input has all its blocks in increasing order, then returned
+    RangeSet will have an extra attribute 'monotonic' that is set to
+    True.  For example the input "10-20 30" is monotonic, but the input
+    "15-20 30 10-14" is not, even though they represent the same set
+    of blocks (and the two RangeSets will compare equal with ==).
+    """
+
+    data = []
+    last = -1
+    monotonic = True
+    for p in text.split():
+      if "-" in p:
+        s, e = (int(x) for x in p.split("-"))
+        data.append(s)
+        data.append(e+1)
+        if last <= s <= e:
+          last = e
+        else:
+          monotonic = False
+      else:
+        s = int(p)
+        data.append(s)
+        data.append(s+1)
+        if last <= s:
+          last = s+1
+        else:
+          monotonic = False
+    data.sort()
+    r = RangeSet(cls._remove_pairs(data))
+    r.monotonic = monotonic
+    return r
+
+  @staticmethod
+  def _remove_pairs(source):
+    last = None
+    for i in source:
+      if i == last:
+        last = None
+      else:
+        if last is not None:
+          yield last
+        last = i
+    if last is not None:
+      yield last
+
+  def to_string(self):
+    out = []
+    for i in range(0, len(self.data), 2):
+      s, e = self.data[i:i+2]
+      if e == s+1:
+        out.append(str(s))
+      else:
+        out.append(str(s) + "-" + str(e-1))
+    return " ".join(out)
+
+  def to_string_raw(self):
+    return str(len(self.data)) + "," + ",".join(str(i) for i in self.data)
+
+  def union(self, other):
+    """Return a new RangeSet representing the union of this RangeSet
+    with the argument."""
+    out = []
+    z = 0
+    for p, d in heapq.merge(zip(self.data, itertools.cycle((+1, -1))),
+                            zip(other.data, itertools.cycle((+1, -1)))):
+      if (z == 0 and d == 1) or (z == 1 and d == -1):
+        out.append(p)
+      z += d
+    return RangeSet(data=out)
+
+  def intersect(self, other):
+    """Return a new RangeSet representing the intersection of this
+    RangeSet with the argument."""
+    out = []
+    z = 0
+    for p, d in heapq.merge(zip(self.data, itertools.cycle((+1, -1))),
+                            zip(other.data, itertools.cycle((+1, -1)))):
+      if (z == 1 and d == 1) or (z == 2 and d == -1):
+        out.append(p)
+      z += d
+    return RangeSet(data=out)
+
+  def subtract(self, other):
+    """Return a new RangeSet representing subtracting the argument
+    from this RangeSet."""
+
+    out = []
+    z = 0
+    for p, d in heapq.merge(zip(self.data, itertools.cycle((+1, -1))),
+                            zip(other.data, itertools.cycle((-1, +1)))):
+      if (z == 0 and d == 1) or (z == 1 and d == -1):
+        out.append(p)
+      z += d
+    return RangeSet(data=out)
+
+  def overlaps(self, other):
+    """Returns true if the argument has a nonempty overlap with this
+    RangeSet."""
+
+    # This is like intersect, but we can stop as soon as we discover the
+    # output is going to be nonempty.
+    z = 0
+    for p, d in heapq.merge(zip(self.data, itertools.cycle((+1, -1))),
+                            zip(other.data, itertools.cycle((+1, -1)))):
+      if (z == 1 and d == 1) or (z == 2 and d == -1):
+        return True
+      z += d
+    return False
+
+  def size(self):
+    """Returns the total size of the RangeSet (ie, how many integers
+    are in the set)."""
+
+    total = 0
+    for i, p in enumerate(self.data):
+      if i % 2:
+        total += p
+      else:
+        total -= p
+    return total
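
A short usage sketch for the RangeSet class added above (illustration only, not part of the change); the expected values follow from the parse() docstring, whose example "10-20 30 35-40" covers 18 blocks:

from rangelib import RangeSet

a = RangeSet.parse("10-20 30 35-40")   # the docstring example: 18 blocks
b = RangeSet.parse("18-32")

print a.size()                    # 18
print a.union(b).to_string()      # 10-32 35-40
print a.intersect(b).to_string()  # 18-20 30
print a.subtract(b).to_string()   # 10-17 35-40
print a.overlaps(b)               # True
print a.to_string_raw()           # 6,10,21,30,31,35,41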
diff --git a/tools/releasetools/sign_target_files_apks b/tools/releasetools/sign_target_files_apks
index ab24706..075c925 100755
--- a/tools/releasetools/sign_target_files_apks
+++ b/tools/releasetools/sign_target_files_apks
@@ -67,8 +67,8 @@
 
 import sys
 
-if sys.hexversion < 0x02040000:
-  print >> sys.stderr, "Python 2.4 or newer is required."
+if sys.hexversion < 0x02070000:
+  print >> sys.stderr, "Python 2.7 or newer is required."
   sys.exit(1)
 
 import base64
@@ -77,10 +77,12 @@
 import errno
 import os
 import re
+import shutil
 import subprocess
 import tempfile
 import zipfile
 
+import add_img_to_target_files
 import common
 
 OPTIONS = common.OPTIONS
@@ -88,6 +90,8 @@
 OPTIONS.extra_apks = {}
 OPTIONS.key_map = {}
 OPTIONS.replace_ota_keys = False
+OPTIONS.replace_verity_public_key = False
+OPTIONS.replace_verity_private_key = False
 OPTIONS.tag_changes = ("-test-keys", "-dev-keys", "+release-keys")
 
 def GetApkCerts(tf_zip):
@@ -139,14 +143,49 @@
   return data
 
 
-def SignApks(input_tf_zip, output_tf_zip, apk_key_map, key_passwords):
+def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
+                       apk_key_map, key_passwords):
+
   maxsize = max([len(os.path.basename(i.filename))
                  for i in input_tf_zip.infolist()
                  if i.filename.endswith('.apk')])
+  rebuild_recovery = False
+
+  tmpdir = tempfile.mkdtemp()
+  def write_to_temp(fn, attr, data):
+    fn = os.path.join(tmpdir, fn)
+    if fn.endswith("/"):
+      os.mkdir(fn)
+    else:
+      d = os.path.dirname(fn)
+      if d and not os.path.exists(d):
+        os.makedirs(d)
+
+      if attr >> 16 == 0xa1ff:
+        os.symlink(data, fn)
+      else:
+        with open(fn, "wb") as f:
+          f.write(data)
 
   for info in input_tf_zip.infolist():
+    if info.filename.startswith("IMAGES/"): continue
+
     data = input_tf_zip.read(info.filename)
     out_info = copy.copy(info)
+
+    if (info.filename == "META/misc_info.txt" and
+        OPTIONS.replace_verity_private_key):
+      ReplaceVerityPrivateKey(input_tf_zip, output_tf_zip, misc_info,
+                              OPTIONS.replace_verity_private_key[1])
+    elif (info.filename == "BOOT/RAMDISK/verity_key" and
+        OPTIONS.replace_verity_public_key):
+      ReplaceVerityPublicKey(output_tf_zip, OPTIONS.replace_verity_public_key[1])
+    elif (info.filename.startswith("BOOT/") or
+        info.filename.startswith("RECOVERY/") or
+        info.filename.startswith("META/") or
+        info.filename == "SYSTEM/etc/recovery-resource.dat"):
+      write_to_temp(info.filename, info.external_attr, data)
+
     if info.filename.endswith(".apk"):
       name = os.path.basename(info.filename)
       key = apk_key_map[name]
@@ -161,16 +200,51 @@
     elif info.filename in ("SYSTEM/build.prop",
                            "RECOVERY/RAMDISK/default.prop"):
       print "rewriting %s:" % (info.filename,)
-      new_data = RewriteProps(data)
+      new_data = RewriteProps(data, misc_info)
       output_tf_zip.writestr(out_info, new_data)
+      if info.filename == "RECOVERY/RAMDISK/default.prop":
+        write_to_temp(info.filename, info.external_attr, new_data)
     elif info.filename.endswith("mac_permissions.xml"):
       print "rewriting %s with new keys." % (info.filename,)
       new_data = ReplaceCerts(data)
       output_tf_zip.writestr(out_info, new_data)
+    elif info.filename in ("SYSTEM/recovery-from-boot.p",
+                           "SYSTEM/bin/install-recovery.sh"):
+      rebuild_recovery = True
+    elif (OPTIONS.replace_ota_keys and
+          info.filename in ("RECOVERY/RAMDISK/res/keys",
+                            "SYSTEM/etc/security/otacerts.zip")):
+      # don't copy these files if we're regenerating them below
+      pass
+    elif (OPTIONS.replace_verity_private_key and
+          info.filename == "META/misc_info.txt"):
+      pass
+    elif (OPTIONS.replace_verity_public_key and
+          info.filename == "BOOT/RAMDISK/verity_key"):
+      pass
     else:
       # a non-APK file; copy it verbatim
       output_tf_zip.writestr(out_info, data)
 
+  if OPTIONS.replace_ota_keys:
+    new_recovery_keys = ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
+    if new_recovery_keys:
+      write_to_temp("RECOVERY/RAMDISK/res/keys", 0755 << 16, new_recovery_keys)
+
+  if rebuild_recovery:
+    recovery_img = common.GetBootableImage(
+        "recovery.img", "recovery.img", tmpdir, "RECOVERY", info_dict=misc_info)
+    boot_img = common.GetBootableImage(
+        "boot.img", "boot.img", tmpdir, "BOOT", info_dict=misc_info)
+
+    def output_sink(fn, data):
+      output_tf_zip.writestr("SYSTEM/"+fn, data)
+
+    common.MakeRecoveryPatch(tmpdir, output_sink, recovery_img, boot_img,
+                             info_dict=misc_info)
+
+  shutil.rmtree(tmpdir)
+
 
 def ReplaceCerts(data):
   """Given a string of data, replace all occurences of a set
@@ -214,14 +288,20 @@
   return ",".join(sorted(tags))
 
 
-def RewriteProps(data):
+def RewriteProps(data, misc_info):
   output = []
   for line in data.split("\n"):
     line = line.strip()
     original_line = line
-    if line and line[0] != '#':
+    if line and line[0] != '#' and "=" in line:
       key, value = line.split("=", 1)
-      if key == "ro.build.fingerprint":
+      if (key == "ro.build.fingerprint"
+          and misc_info.get("oem_fingerprint_properties") is None):
+        pieces = value.split("/")
+        pieces[-1] = EditTags(pieces[-1])
+        value = "/".join(pieces)
+      elif (key == "ro.build.thumbprint"
+          and misc_info.get("oem_fingerprint_properties") is not None):
         pieces = value.split("/")
         pieces[-1] = EditTags(pieces[-1])
         value = "/".join(pieces)
@@ -235,7 +315,7 @@
       elif key == "ro.build.display.id":
         # change, eg, "JWR66N dev-keys" to "JWR66N"
         value = value.split()
-        if len(value) >  1 and value[-1].endswith("-keys"):
+        if len(value) > 1 and value[-1].endswith("-keys"):
           value.pop()
         value = " ".join(value)
       line = key + "=" + value
@@ -265,7 +345,8 @@
   for k in keylist:
     m = re.match(r"^(.*)\.x509\.pem$", k)
     if not m:
-      raise common.ExternalError("can't parse \"%s\" from META/otakeys.txt" % (k,))
+      raise common.ExternalError(
+          "can't parse \"%s\" from META/otakeys.txt" % (k,))
     k = m.group(1)
     mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem")
 
@@ -287,10 +368,11 @@
                   os.path.join(OPTIONS.search_path, "framework", "dumpkey.jar")]
                  + mapped_keys + extra_recovery_keys,
                  stdout=subprocess.PIPE)
-  data, _ = p.communicate()
+  new_recovery_keys, _ = p.communicate()
   if p.returncode != 0:
     raise common.ExternalError("failed to run dumpkeys")
-  common.ZipWriteStr(output_tf_zip, "RECOVERY/RAMDISK/res/keys", data)
+  common.ZipWriteStr(output_tf_zip, "RECOVERY/RAMDISK/res/keys",
+                     new_recovery_keys)
 
   # SystemUpdateActivity uses the x509.pem version of the keys, but
   # put into a zipfile system/etc/security/otacerts.zip.
@@ -304,6 +386,19 @@
   common.ZipWriteStr(output_tf_zip, "SYSTEM/etc/security/otacerts.zip",
                      tempfile.getvalue())
 
+  return new_recovery_keys
+
+def ReplaceVerityPublicKey(targetfile_zip, key_path):
+  print "Replacing verity public key with %s" % key_path
+  with open(key_path) as f:
+    common.ZipWriteStr(targetfile_zip, "BOOT/RAMDISK/verity_key", f.read())
+
+def ReplaceVerityPrivateKey(targetfile_input_zip, targetfile_output_zip,
+                            misc_info, key_path):
+  print "Replacing verity private key with %s" % key_path
+  current_key = misc_info["verity_key"]
+  original_misc_info = targetfile_input_zip.read("META/misc_info.txt")
+  new_misc_info = original_misc_info.replace(current_key, key_path)
+  common.ZipWriteStr(targetfile_output_zip, "META/misc_info.txt", new_misc_info)
 
 def BuildKeyMap(misc_info, key_mapping_options):
   for s, d in key_mapping_options:
@@ -347,6 +442,10 @@
           raise ValueError("Bad tag change '%s'" % (i,))
         new.append(i[0] + i[1:].strip())
       OPTIONS.tag_changes = tuple(new)
+    elif o == "--replace_verity_public_key":
+      OPTIONS.replace_verity_public_key = (True, a)
+    elif o == "--replace_verity_private_key":
+      OPTIONS.replace_verity_private_key = (True, a)
     else:
       return False
     return True
@@ -357,7 +456,9 @@
                                               "default_key_mappings=",
                                               "key_mapping=",
                                               "replace_ota_keys",
-                                              "tag_changes="],
+                                              "tag_changes=",
+                                              "replace_verity_public_key=",
+                                              "replace_verity_private_key="],
                              extra_option_handler=option_handler)
 
   if len(args) != 2:
@@ -375,14 +476,14 @@
   CheckAllApksSigned(input_zip, apk_key_map)
 
   key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
-  SignApks(input_zip, output_zip, apk_key_map, key_passwords)
-
-  if OPTIONS.replace_ota_keys:
-    ReplaceOtaKeys(input_zip, output_zip, misc_info)
+  ProcessTargetFiles(input_zip, output_zip, misc_info,
+                     apk_key_map, key_passwords)
 
   input_zip.close()
   output_zip.close()
 
+  add_img_to_target_files.AddImagesToTargetFiles(args[1])
+
   print "done."
 
 
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
new file mode 100644
index 0000000..7574747
--- /dev/null
+++ b/tools/releasetools/sparse_img.py
@@ -0,0 +1,213 @@
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import bisect
+import os
+import sys
+import struct
+import pprint
+from hashlib import sha1
+
+from rangelib import RangeSet
+
+class SparseImage(object):
+  """Wraps a sparse image file (and optional file map) into an image
+  object suitable for passing to BlockImageDiff."""
+
+  def __init__(self, simg_fn, file_map_fn=None):
+    self.simg_f = f = open(simg_fn, "rb")
+
+    header_bin = f.read(28)
+    header = struct.unpack("<I4H4I", header_bin)
+
+    magic = header[0]
+    major_version = header[1]
+    minor_version = header[2]
+    file_hdr_sz = header[3]
+    chunk_hdr_sz = header[4]
+    self.blocksize = blk_sz = header[5]
+    self.total_blocks = total_blks = header[6]
+    total_chunks = header[7]
+    image_checksum = header[8]
+
+    if magic != 0xED26FF3A:
+      raise ValueError("Magic should be 0xED26FF3A but is 0x%08X" % (magic,))
+    if major_version != 1 or minor_version != 0:
+      raise ValueError("I know about version 1.0, but this is version %u.%u" %
+                       (major_version, minor_version))
+    if file_hdr_sz != 28:
+      raise ValueError("File header size was expected to be 28, but is %u." %
+                       (file_hdr_sz,))
+    if chunk_hdr_sz != 12:
+      raise ValueError("Chunk header size was expected to be 12, but is %u." %
+                       (chunk_hdr_sz,))
+
+    print("Total of %u %u-byte output blocks in %u input chunks."
+          % (total_blks, blk_sz, total_chunks))
+
+    pos = 0   # in blocks
+    care_data = []
+    self.offset_map = offset_map = []
+
+    for i in range(total_chunks):
+      header_bin = f.read(12)
+      header = struct.unpack("<2H2I", header_bin)
+      chunk_type = header[0]
+      reserved1 = header[1]
+      chunk_sz = header[2]
+      total_sz = header[3]
+      data_sz = total_sz - 12
+
+      if chunk_type == 0xCAC1:
+        if data_sz != (chunk_sz * blk_sz):
+          raise ValueError(
+              "Raw chunk input size (%u) does not match output size (%u)" %
+              (data_sz, chunk_sz * blk_sz))
+        else:
+          care_data.append(pos)
+          care_data.append(pos + chunk_sz)
+          offset_map.append((pos, chunk_sz, f.tell(), None))
+          pos += chunk_sz
+          f.seek(data_sz, os.SEEK_CUR)
+
+      elif chunk_type == 0xCAC2:
+        fill_data = f.read(4)
+        care_data.append(pos)
+        care_data.append(pos + chunk_sz)
+        offset_map.append((pos, chunk_sz, None, fill_data))
+        pos += chunk_sz
+
+      elif chunk_type == 0xCAC3:
+        if data_sz != 0:
+          raise ValueError("Don't care chunk input size is non-zero (%u)" %
+                           (data_sz))
+        else:
+          pos += chunk_sz
+
+      elif chunk_type == 0xCAC4:
+        raise ValueError("CRC32 chunks are not supported")
+
+      else:
+        raise ValueError("Unknown chunk type 0x%04X not supported" %
+                         (chunk_type,))
+
+    self.care_map = RangeSet(care_data)
+    self.offset_index = [i[0] for i in offset_map]
+
+    if file_map_fn:
+      self.LoadFileBlockMap(file_map_fn)
+    else:
+      self.file_map = {"__DATA": self.care_map}
+
+  def ReadRangeSet(self, ranges):
+    return [d for d in self._GetRangeData(ranges)]
+
+  def TotalSha1(self):
+    """Return the SHA-1 hash of all data in the 'care' regions of this image."""
+    h = sha1()
+    for d in self._GetRangeData(self.care_map):
+      h.update(d)
+    return h.hexdigest()
+
+  def _GetRangeData(self, ranges):
+    """Generator that produces all the image data in 'ranges'.  The
+    number of individual pieces returned is arbitrary (and in
+    particular is not necessarily equal to the number of ranges in
+    'ranges').
+
+    This generator is stateful -- it depends on the open file object
+    contained in this SparseImage, so you should not try to run two
+    instances of this generator on the same object simultaneously."""
+
+    f = self.simg_f
+    for s, e in ranges:
+      to_read = e-s
+      idx = bisect.bisect_right(self.offset_index, s) - 1
+      chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
+
+      # for the first chunk we may be starting partway through it.
+      remain = chunk_len - (s - chunk_start)
+      this_read = min(remain, to_read)
+      if filepos is not None:
+        p = filepos + ((s - chunk_start) * self.blocksize)
+        f.seek(p, os.SEEK_SET)
+        yield f.read(this_read * self.blocksize)
+      else:
+        yield fill_data * (this_read * (self.blocksize >> 2))
+      to_read -= this_read
+
+      while to_read > 0:
+        # continue with following chunks if this range spans multiple chunks.
+        idx += 1
+        chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
+        this_read = min(chunk_len, to_read)
+        if filepos is not None:
+          f.seek(filepos, os.SEEK_SET)
+          yield f.read(this_read * self.blocksize)
+        else:
+          yield fill_data * (this_read * (self.blocksize >> 2))
+        to_read -= this_read
+
+  def LoadFileBlockMap(self, fn):
+    remaining = self.care_map
+    self.file_map = out = {}
+
+    with open(fn) as f:
+      for line in f:
+        fn, ranges = line.split(None, 1)
+        ranges = RangeSet.parse(ranges)
+        out[fn] = ranges
+        assert ranges.size() == ranges.intersect(remaining).size()
+        remaining = remaining.subtract(ranges)
+
+    # For all the remaining blocks in the care_map (ie, those that
+    # aren't part of the data for any file), divide them into blocks
+    # that are all zero and blocks that aren't.  (Zero blocks are
+    # handled specially because (1) there are usually a lot of them
+    # and (2) bsdiff handles files with long sequences of repeated
+    # bytes especially poorly.)
+
+    zero_blocks = []
+    nonzero_blocks = []
+    reference = '\0' * self.blocksize
+
+    f = self.simg_f
+    for s, e in remaining:
+      for b in range(s, e):
+        idx = bisect.bisect_right(self.offset_index, b) - 1
+        chunk_start, chunk_len, filepos, fill_data = self.offset_map[idx]
+        if filepos is not None:
+          filepos += (b-chunk_start) * self.blocksize
+          f.seek(filepos, os.SEEK_SET)
+          data = f.read(self.blocksize)
+        else:
+          if fill_data == reference[:4]:   # fill with all zeros
+            data = reference
+          else:
+            data = None
+
+        if data == reference:
+          zero_blocks.append(b)
+          zero_blocks.append(b+1)
+        else:
+          nonzero_blocks.append(b)
+          nonzero_blocks.append(b+1)
+
+    out["__ZERO"] = RangeSet(data=zero_blocks)
+    out["__NONZERO"] = RangeSet(data=nonzero_blocks)
+
+  def ResetFileMap(self):
+    """Throw away the file map and treat the entire image as
+    undifferentiated data."""
+    self.file_map = {"__DATA": self.care_map}
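
For reference (illustration only), the 28-byte header that SparseImage parses above can be reproduced with the same "<I4H4I" layout; the field values below describe an invented 4096-byte-block image:

import struct

hdr = struct.pack("<I4H4I",
                  0xED26FF3A,   # magic
                  1, 0,         # major, minor version
                  28, 12,       # file header size, chunk header size
                  4096,         # block size in bytes
                  8,            # total output blocks
                  1,            # total chunks
                  0)            # image checksum (not checked by the parser)

(magic, major, minor, file_hdr_sz, chunk_hdr_sz,
 blk_sz, total_blks, total_chunks, _) = struct.unpack("<I4H4I", hdr)
assert magic == 0xED26FF3A and (major, minor) == (1, 0)
print "Total of %u %u-byte output blocks in %u input chunks." % (
    total_blks, blk_sz, total_chunks)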
diff --git a/tools/zipalign/Android.mk b/tools/zipalign/Android.mk
index 708c8bf..7986798 100644
--- a/tools/zipalign/Android.mk
+++ b/tools/zipalign/Android.mk
@@ -12,13 +12,15 @@
 	ZipEntry.cpp \
 	ZipFile.cpp
 
-LOCAL_C_INCLUDES += external/zlib
+LOCAL_C_INCLUDES += external/zlib \
+	external/zopfli/src
 
 LOCAL_STATIC_LIBRARIES := \
 	libandroidfw \
 	libutils \
 	libcutils \
-	liblog
+	liblog \
+	libzopfli
 
 ifeq ($(HOST_OS),linux)
 LOCAL_LDLIBS += -lrt
diff --git a/tools/zipalign/ZipAlign.cpp b/tools/zipalign/ZipAlign.cpp
index 8b2d1af..dc2826b 100644
--- a/tools/zipalign/ZipAlign.cpp
+++ b/tools/zipalign/ZipAlign.cpp
@@ -32,19 +32,20 @@
     fprintf(stderr, "Zip alignment utility\n");
     fprintf(stderr, "Copyright (C) 2009 The Android Open Source Project\n\n");
     fprintf(stderr,
-        "Usage: zipalign [-f] [-v] <align> infile.zip outfile.zip\n"
+        "Usage: zipalign [-f] [-v] [-z] <align> infile.zip outfile.zip\n"
         "       zipalign -c [-v] <align> infile.zip\n\n" );
     fprintf(stderr,
         "  <align>: alignment in bytes, e.g. '4' provides 32-bit alignment\n");
     fprintf(stderr, "  -c: check alignment only (does not modify file)\n");
     fprintf(stderr, "  -f: overwrite existing outfile.zip\n");
     fprintf(stderr, "  -v: verbose output\n");
+    fprintf(stderr, "  -z: recompress using Zopfli\n");
 }
 
 /*
  * Copy all entries from "pZin" to "pZout", aligning as needed.
  */
-static int copyAndAlign(ZipFile* pZin, ZipFile* pZout, int alignment)
+static int copyAndAlign(ZipFile* pZin, ZipFile* pZout, int alignment, bool zopfli)
 {
     int numEntries = pZin->getNumEntries();
     ZipEntry* pEntry;
@@ -67,6 +68,12 @@
             //    pEntry->getFileName(), (long) pEntry->getFileOffset(),
             //    (long) pEntry->getUncompressedLen());
 
+            if (zopfli) {
+                status = pZout->addRecompress(pZin, pEntry, &pNewEntry);
+                bias += pNewEntry->getCompressedLen() - pEntry->getCompressedLen();
+            } else {
+                status = pZout->add(pZin, pEntry, padding, &pNewEntry);
+            }
         } else {
             /*
              * Copy the entry, adjusting as required.  We assume that the
@@ -79,9 +86,9 @@
             //printf("--- %s: orig at %ld(+%d) len=%ld, adding pad=%d\n",
             //    pEntry->getFileName(), (long) pEntry->getFileOffset(),
             //    bias, (long) pEntry->getUncompressedLen(), padding);
+            status = pZout->add(pZin, pEntry, padding, &pNewEntry);
         }
 
-        status = pZout->add(pZin, pEntry, padding, &pNewEntry);
         if (status != NO_ERROR)
             return 1;
         bias += padding;
@@ -98,7 +105,7 @@
  * output file exists and "force" wasn't specified.
  */
 static int process(const char* inFileName, const char* outFileName,
-    int alignment, bool force)
+    int alignment, bool force, bool zopfli)
 {
     ZipFile zin, zout;
 
@@ -129,7 +136,7 @@
         return 1;
     }
 
-    int result = copyAndAlign(&zin, &zout, alignment);
+    int result = copyAndAlign(&zin, &zout, alignment, zopfli);
     if (result != 0) {
         printf("zipalign: failed rewriting '%s' to '%s'\n",
             inFileName, outFileName);
@@ -196,6 +203,7 @@
     bool check = false;
     bool force = false;
     bool verbose = false;
+    bool zopfli = false;
     int result = 1;
     int alignment;
     char* endp;
@@ -222,6 +230,9 @@
             case 'v':
                 verbose = true;
                 break;
+            case 'z':
+                zopfli = true;
+                break;
             default:
                 fprintf(stderr, "ERROR: unknown flag -%c\n", *cp);
                 wantUsage = true;
@@ -252,7 +263,7 @@
         result = verify(argv[1], alignment, verbose);
     } else {
         /* create the new archive */
-        result = process(argv[1], argv[2], alignment, force);
+        result = process(argv[1], argv[2], alignment, force, zopfli);
 
         /* trust, but verify */
         if (result == 0)
diff --git a/tools/zipalign/ZipFile.cpp b/tools/zipalign/ZipFile.cpp
index 8057068..3c5ec15 100644
--- a/tools/zipalign/ZipFile.cpp
+++ b/tools/zipalign/ZipFile.cpp
@@ -28,6 +28,8 @@
 #include <zlib.h>
 #define DEF_MEM_LEVEL 8                // normally in zutil.h?
 
+#include "zopfli/deflate.h"
+
 #include <memory.h>
 #include <sys/stat.h>
 #include <errno.h>
@@ -638,6 +640,141 @@
 }
 
 /*
+ * Add an entry by copying it from another zip file, recompressing with
+ * Zopfli if already compressed.
+ *
+ * If "ppEntry" is non-NULL, a pointer to the new entry will be returned.
+ */
+status_t ZipFile::addRecompress(const ZipFile* pSourceZip, const ZipEntry* pSourceEntry,
+    ZipEntry** ppEntry)
+{
+    ZipEntry* pEntry = NULL;
+    status_t result;
+    long lfhPosn, startPosn, endPosn, uncompressedLen;
+
+    if (mReadOnly)
+        return INVALID_OPERATION;
+
+    /* make sure we're in a reasonable state */
+    assert(mZipFp != NULL);
+    assert(mEntries.size() == mEOCD.mTotalNumEntries);
+
+    if (fseek(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
+        result = UNKNOWN_ERROR;
+        goto bail;
+    }
+
+    pEntry = new ZipEntry;
+    if (pEntry == NULL) {
+        result = NO_MEMORY;
+        goto bail;
+    }
+
+    result = pEntry->initFromExternal(pSourceZip, pSourceEntry);
+    if (result != NO_ERROR)
+        goto bail;
+
+    /*
+     * From here on out, failures are more interesting.
+     */
+    mNeedCDRewrite = true;
+
+    /*
+     * Write the LFH, even though it's still mostly blank.  We need it
+     * as a place-holder.  In theory the LFH isn't necessary, but in
+     * practice some utilities demand it.
+     */
+    lfhPosn = ftell(mZipFp);
+    pEntry->mLFH.write(mZipFp);
+    startPosn = ftell(mZipFp);
+
+    /*
+     * Copy the data over.
+     *
+     * If the "has data descriptor" flag is set, we want to copy the DD
+     * fields as well.  This is a fixed-size area immediately following
+     * the data.
+     */
+    if (fseek(pSourceZip->mZipFp, pSourceEntry->getFileOffset(), SEEK_SET) != 0)
+    {
+        result = UNKNOWN_ERROR;
+        goto bail;
+    }
+
+    uncompressedLen = pSourceEntry->getUncompressedLen();
+
+    if (pSourceEntry->isCompressed()) {
+        void *buf = pSourceZip->uncompress(pSourceEntry);
+        if (buf == NULL) {
+            result = NO_MEMORY;
+            goto bail;
+        }
+        long startPosn = ftell(mZipFp);
+        unsigned long crc;
+        if (compressFpToFp(mZipFp, NULL, buf, uncompressedLen, &crc) != NO_ERROR) {
+            ALOGW("recompress of '%s' failed\n", pEntry->mCDE.mFileName);
+            result = UNKNOWN_ERROR;
+            free(buf);
+            goto bail;
+        }
+        long endPosn = ftell(mZipFp);
+        pEntry->setDataInfo(uncompressedLen, endPosn - startPosn,
+            pSourceEntry->getCRC32(), ZipEntry::kCompressDeflated);
+        free(buf);
+    } else {
+        off_t copyLen;
+        copyLen = pSourceEntry->getCompressedLen();
+        if ((pSourceEntry->mLFH.mGPBitFlag & ZipEntry::kUsesDataDescr) != 0)
+            copyLen += ZipEntry::kDataDescriptorLen;
+
+        if (copyPartialFpToFp(mZipFp, pSourceZip->mZipFp, copyLen, NULL)
+            != NO_ERROR)
+        {
+            ALOGW("copy of '%s' failed\n", pEntry->mCDE.mFileName);
+            result = UNKNOWN_ERROR;
+            goto bail;
+        }
+    }
+
+    /*
+     * Update file offsets.
+     */
+    endPosn = ftell(mZipFp);
+
+    /*
+     * Success!  Fill out new values.
+     */
+    pEntry->setLFHOffset(lfhPosn);
+    mEOCD.mNumEntries++;
+    mEOCD.mTotalNumEntries++;
+    mEOCD.mCentralDirSize = 0;      // mark invalid; set by flush()
+    mEOCD.mCentralDirOffset = endPosn;
+
+    /*
+     * Go back and write the LFH.
+     */
+    if (fseek(mZipFp, lfhPosn, SEEK_SET) != 0) {
+        result = UNKNOWN_ERROR;
+        goto bail;
+    }
+    pEntry->mLFH.write(mZipFp);
+
+    /*
+     * Add pEntry to the list.
+     */
+    mEntries.add(pEntry);
+    if (ppEntry != NULL)
+        *ppEntry = pEntry;
+    pEntry = NULL;
+
+    result = NO_ERROR;
+
+bail:
+    delete pEntry;
+    return result;
+}
+
+/*
  * Copy all of the bytes in "src" to "dst".
  *
  * On exit, "srcFp" will be seeked to the end of the file, and "dstFp"
@@ -744,73 +881,43 @@
     const void* data, size_t size, unsigned long* pCRC32)
 {
     status_t result = NO_ERROR;
-    const size_t kBufSize = 32768;
+    const size_t kBufSize = 1024 * 1024;
     unsigned char* inBuf = NULL;
     unsigned char* outBuf = NULL;
-    z_stream zstream;
+    size_t outSize = 0;
     bool atEof = false;     // no feof() available yet
     unsigned long crc;
-    int zerr;
+    ZopfliOptions options;
+    unsigned char bp = 0;
 
-    /*
-     * Create an input buffer and an output buffer.
-     */
-    inBuf = new unsigned char[kBufSize];
-    outBuf = new unsigned char[kBufSize];
-    if (inBuf == NULL || outBuf == NULL) {
-        result = NO_MEMORY;
-        goto bail;
-    }
-
-    /*
-     * Initialize the zlib stream.
-     */
-    memset(&zstream, 0, sizeof(zstream));
-    zstream.zalloc = Z_NULL;
-    zstream.zfree = Z_NULL;
-    zstream.opaque = Z_NULL;
-    zstream.next_in = NULL;
-    zstream.avail_in = 0;
-    zstream.next_out = outBuf;
-    zstream.avail_out = kBufSize;
-    zstream.data_type = Z_UNKNOWN;
-
-    zerr = deflateInit2(&zstream, Z_BEST_COMPRESSION,
-        Z_DEFLATED, -MAX_WBITS, DEF_MEM_LEVEL, Z_DEFAULT_STRATEGY);
-    if (zerr != Z_OK) {
-        result = UNKNOWN_ERROR;
-        if (zerr == Z_VERSION_ERROR) {
-            ALOGE("Installed zlib is not compatible with linked version (%s)\n",
-                ZLIB_VERSION);
-        } else {
-            ALOGD("Call to deflateInit2 failed (zerr=%d)\n", zerr);
-        }
-        goto bail;
-    }
+    ZopfliInitOptions(&options);
 
     crc = crc32(0L, Z_NULL, 0);
 
-    /*
-     * Loop while we have data.
-     */
-    do {
-        size_t getSize;
-        int flush;
+    if (data) {
+        crc = crc32(crc, (const unsigned char*)data, size);
+        ZopfliDeflate(&options, 2, true, (const unsigned char*)data, size, &bp,
+            &outBuf, &outSize);
+    } else {
+        /*
+         * Create an input buffer and an output buffer.
+         */
+        inBuf = new unsigned char[kBufSize];
+        if (inBuf == NULL) {
+            result = NO_MEMORY;
+            goto bail;
+        }
 
-        /* only read if the input buffer is empty */
-        if (zstream.avail_in == 0 && !atEof) {
-            ALOGV("+++ reading %d bytes\n", (int)kBufSize);
-            if (data) {
-                getSize = size > kBufSize ? kBufSize : size;
-                memcpy(inBuf, data, getSize);
-                data = ((const char*)data) + getSize;
-                size -= getSize;
-            } else {
-                getSize = fread(inBuf, 1, kBufSize, srcFp);
-                if (ferror(srcFp)) {
-                    ALOGD("deflate read failed (errno=%d)\n", errno);
-                    goto z_bail;
-                }
+        /*
+         * Loop while we have data.
+         */
+        do {
+            size_t getSize;
+            getSize = fread(inBuf, 1, kBufSize, srcFp);
+            if (ferror(srcFp)) {
+                ALOGD("deflate read failed (errno=%d)\n", errno);
+                delete[] inBuf;
+                goto bail;
             }
             if (getSize < kBufSize) {
                 ALOGV("+++  got %d bytes, EOF reached\n",
@@ -819,51 +926,21 @@
             }
 
             crc = crc32(crc, inBuf, getSize);
+            ZopfliDeflate(&options, 2, atEof, inBuf, getSize, &bp, &outBuf, &outSize);
+        } while (!atEof);
+        delete[] inBuf;
+    }
 
-            zstream.next_in = inBuf;
-            zstream.avail_in = getSize;
-        }
-
-        if (atEof)
-            flush = Z_FINISH;       /* tell zlib that we're done */
-        else
-            flush = Z_NO_FLUSH;     /* more to come! */
-
-        zerr = deflate(&zstream, flush);
-        if (zerr != Z_OK && zerr != Z_STREAM_END) {
-            ALOGD("zlib deflate call failed (zerr=%d)\n", zerr);
-            result = UNKNOWN_ERROR;
-            goto z_bail;
-        }
-
-        /* write when we're full or when we're done */
-        if (zstream.avail_out == 0 ||
-            (zerr == Z_STREAM_END && zstream.avail_out != (uInt) kBufSize))
-        {
-            ALOGV("+++ writing %d bytes\n", (int) (zstream.next_out - outBuf));
-            if (fwrite(outBuf, 1, zstream.next_out - outBuf, dstFp) !=
-                (size_t)(zstream.next_out - outBuf))
-            {
-                ALOGD("write %d failed in deflate\n",
-                    (int) (zstream.next_out - outBuf));
-                goto z_bail;
-            }
-
-            zstream.next_out = outBuf;
-            zstream.avail_out = kBufSize;
-        }
-    } while (zerr == Z_OK);
-
-    assert(zerr == Z_STREAM_END);       /* other errors should've been caught */
+    ALOGV("+++ writing %d bytes\n", (int)outSize);
+    if (fwrite(outBuf, 1, outSize, dstFp) != outSize) {
+        ALOGD("write %d failed in deflate\n", (int)outSize);
+        goto bail;
+    }
 
     *pCRC32 = crc;
 
-z_bail:
-    deflateEnd(&zstream);        /* free up any allocated structures */
-
 bail:
-    delete[] inBuf;
-    delete[] outBuf;
+    free(outBuf);
 
     return result;
 }
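
A small standalone sketch (Python's zlib, not the zipalign sources) of the stream format involved in the rewrite above: the removed deflateInit2 call used negative window bits to produce a headerless "raw" deflate stream, which is what zip entries store and also what ZopfliDeflate emits, so the Zopfli output drops in as a smaller but format-compatible replacement:

import zlib

data = "the quick brown fox jumps over the lazy dog" * 100

co = zlib.compressobj(9, zlib.DEFLATED, -15)   # raw deflate, no zlib header
raw = co.compress(data) + co.flush()
crc = zlib.crc32(data) & 0xffffffff            # zip keeps a CRC-32 of the input

assert zlib.decompressobj(-15).decompress(raw) == data
print "%d bytes -> %d bytes, crc 0x%08x" % (len(data), len(raw), crc)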
@@ -1148,7 +1225,7 @@
 #endif
 
 // free the memory when you're done
-void* ZipFile::uncompress(const ZipEntry* entry)
+void* ZipFile::uncompress(const ZipEntry* entry) const
 {
     size_t unlen = entry->getUncompressedLen();
     size_t clen = entry->getCompressedLen();
diff --git a/tools/zipalign/ZipFile.h b/tools/zipalign/ZipFile.h
index 7877550..b99cda5 100644
--- a/tools/zipalign/ZipFile.h
+++ b/tools/zipalign/ZipFile.h
@@ -127,6 +127,15 @@
         int padding, ZipEntry** ppEntry);
 
     /*
+     * Add an entry by copying it from another zip file, recompressing with
+     * Zopfli if already compressed.
+     *
+     * If "ppEntry" is non-NULL, a pointer to the new entry will be returned.
+     */
+    status_t addRecompress(const ZipFile* pSourceZip, const ZipEntry* pSourceEntry,
+        ZipEntry** ppEntry);
+
+    /*
      * Mark an entry as having been removed.  It is not actually deleted
      * from the archive or our internal data structures until flush() is
      * called.
@@ -147,7 +156,7 @@
      */
     //bool uncompress(const ZipEntry* pEntry, void* buf) const;
     //bool uncompress(const ZipEntry* pEntry, FILE* fp) const;
-    void* uncompress(const ZipEntry* pEntry);
+    void* uncompress(const ZipEntry* pEntry) const;
 
     /*
      * Get an entry, by name.  Returns NULL if not found.