Merge "Use zip2zip for uncompress-dexs and uncompress-shared-libs"
diff --git a/core/Makefile b/core/Makefile
index a2d9339..517410a 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -2916,7 +2916,7 @@
 ifeq (true,$(PRODUCT_BUILD_SUPER_PARTITION))
 
 # BOARD_SUPER_PARTITION_SIZE must be defined to build super image.
-ifdef BOARD_SUPER_PARTITION_SIZE
+ifneq ($(BOARD_SUPER_PARTITION_SIZE),)
 
 INSTALLED_SUPERIMAGE_TARGET := $(PRODUCT_OUT)/super.img
 INSTALLED_SUPERIMAGE_EMPTY_TARGET := $(PRODUCT_OUT)/super_empty.img
@@ -2940,9 +2940,9 @@
   --metadata-slots $(if $(1),2,1) \
   --device-size $(BOARD_SUPER_PARTITION_SIZE) \
   $(foreach name,$(BOARD_SUPER_PARTITION_PARTITION_LIST), \
-    --partition $(name)$(1):$$($(UUIDGEN) $(name)$(1)):readonly:$(if $(2),$(call read-size-of-partitions,$(name)),0) \
+    --partition $(name)$(1):readonly:$(if $(2),$(call read-size-of-partitions,$(name)),0) \
     $(if $(2), --image $(name)$(1)=$(call images-for-partitions,$(name))) \
-    $(if $(1), --partition $(name)_b:$$($(UUIDGEN) $(name)_b):readonly:0) \
+    $(if $(1), --partition $(name)_b:readonly:0) \
   )
 endef
 
@@ -2977,32 +2977,61 @@
 # Do not check for apps-only build
 
 ifeq (true,$(PRODUCT_BUILD_SUPER_PARTITION))
-ifdef BOARD_SUPER_PARTITION_SIZE
-ifdef BOARD_SUPER_PARTITION_PARTITION_LIST
 
-droid_targets: check_android_partition_sizes
+droid_targets: check-all-partition-sizes
 
-.PHONY: check_android_partition_sizes
+.PHONY: check-all-partition-sizes check-all-partition-sizes-nodeps
 
 # Add image dependencies so that generated_*_image_info.txt are written before checking.
-check_android_partition_sizes: $(call images-for-partitions,$(BOARD_SUPER_PARTITION_PARTITION_LIST))
+check-all-partition-sizes: $(call images-for-partitions,$(BOARD_SUPER_PARTITION_PARTITION_LIST))
 
-check_android_partition_sizes:
-	partition_size_list="$(call read-size-of-partitions,$(BOARD_SUPER_PARTITION_PARTITION_LIST))"; \
-	sum_sizes_expr=$$(sed -e 's/ /+/g' <<< "$${partition_size_list}"); \
-	max_size_tail=$(if $(filter true,$(AB_OTA_UPDATER))," / 2"); \
-	max_size_expr=$(BOARD_SUPER_PARTITION_SIZE)$${max_size_tail}; \
-	if [ $$(( $${sum_sizes_expr} )) -gt $$(( $${max_size_expr} )) ]; then \
-		echo "The sum of sizes of all logical partitions is larger than BOARD_SUPER_PARTITION_SIZE$${max_size_tail}:"; \
-		echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '>' $${max_size_expr} '==' $$(( $${max_size_expr} )); \
-		exit 1; \
-	else \
-		echo "The sum of sizes of all logical partitions is within BOARD_SUPER_PARTITION_SIZE$${max_size_tail}:"; \
-		echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '<=' $${max_size_expr} '==' $$(( $${max_size_expr} )); \
-	fi
+# $(1): human-readable max size string
+# $(2): max size expression
+# $(3): list of partition names
+define check-sum-of-partition-sizes
+  partition_size_list="$(call read-size-of-partitions,$(3))"; \
+  sum_sizes_expr=$$(sed -e 's/ /+/g' <<< "$${partition_size_list}"); \
+  if [ $$(( $${sum_sizes_expr} )) -gt $$(( $(2) )) ]; then \
+    echo "The sum of sizes of [$(strip $(3))] is larger than $(strip $(1)):"; \
+    echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '>' "$(2)" '==' $$(( $(2) )); \
+    exit 1; \
+  else \
+    echo "The sum of sizes of [$(strip $(3))] is within $(strip $(1)):"; \
+    echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '<=' "$(2)" '==' $$(( $(2) )); \
+  fi
+endef
 
-endif # BOARD_SUPER_PARTITION_PARTITION_LIST
-endif # BOARD_SUPER_PARTITION_SIZE
+define check-all-partition-sizes-target
+  # Check sum(all partitions) <= super partition (/ 2 for A/B)
+  $(if $(BOARD_SUPER_PARTITION_SIZE),$(if $(BOARD_SUPER_PARTITION_PARTITION_LIST), \
+    $(call check-sum-of-partition-sizes,BOARD_SUPER_PARTITION_SIZE$(if $(filter true,$(AB_OTA_UPDATER)), / 2), \
+      $(BOARD_SUPER_PARTITION_SIZE)$(if $(filter true,$(AB_OTA_UPDATER)), / 2),$(BOARD_SUPER_PARTITION_PARTITION_LIST))))
+
+  # For each group, check sum(partitions in group) <= group size
+  $(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
+    $(if $(BOARD_$(group)_SIZE),$(if $(BOARD_$(group)_PARTITION_LIST), \
+      $(call check-sum-of-partition-sizes,BOARD_$(group)_SIZE,$(BOARD_$(group)_SIZE),$(BOARD_$(group)_PARTITION_LIST)))))
+
+  # Check sum(all group sizes) <= super partition (/ 2 for A/B)
+  if [[ ! -z $(BOARD_SUPER_PARTITION_SIZE) ]]; then \
+    group_size_list="$(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)),$(BOARD_$(group)_SIZE))"; \
+    sum_sizes_expr=$$(sed -e 's/ /+/g' <<< "$${group_size_list}"); \
+    max_size_tail=$(if $(filter true,$(AB_OTA_UPDATER))," / 2"); \
+    max_size_expr="$(BOARD_SUPER_PARTITION_SIZE)$${max_size_tail}"; \
+    if [ $$(( $${sum_sizes_expr} )) -gt $$(( $${max_size_expr} )) ]; then \
+      echo "The sum of sizes of [$(strip $(BOARD_SUPER_PARTITION_GROUPS))] is larger than BOARD_SUPER_PARTITION_SIZE$${max_size_tail}:"; \
+      echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '>' $${max_size_expr} '==' $$(( $${max_size_expr} )); \
+      exit 1; \
+    else \
+      echo "The sum of sizes of [$(strip $(BOARD_SUPER_PARTITION_GROUPS))] is within BOARD_SUPER_PARTITION_SIZE$${max_size_tail}:"; \
+      echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '<=' $${max_size_expr} '==' $$(( $${max_size_expr} )); \
+    fi; \
+  fi
+endef
+
+check-all-partition-sizes check-all-partition-sizes-nodeps:
+	$(call check-all-partition-sizes-target)
+
 endif # PRODUCT_BUILD_SUPER_PARTITION
 
 endif # TARGET_BUILD_APPS
@@ -3602,7 +3631,7 @@
 ifdef BUILT_VENDOR_MATRIX
 	$(hide) cp $(BUILT_VENDOR_MATRIX) $(zip_root)/META/vendor_matrix.xml
 endif
-ifdef BOARD_SUPER_PARTITION_SIZE
+ifneq ($(BOARD_SUPER_PARTITION_SIZE),)
 	$(hide) echo "super_size=$(BOARD_SUPER_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "lpmake=$(notdir $(LPMAKE))" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo -n "lpmake_args=" >> $(zip_root)/META/misc_info.txt
diff --git a/core/base_rules.mk b/core/base_rules.mk
index fcc8ede..57fd818 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -759,8 +759,6 @@
 ALL_MODULES.$(my_register_name).MODULE_NAME := $(LOCAL_MODULE)
 ALL_MODULES.$(my_register_name).COMPATIBILITY_SUITES := $(LOCAL_COMPATIBILITY_SUITE)
 ALL_MODULES.$(my_register_name).TEST_CONFIG := $(test_config)
-ALL_MODULES.$(my_register_name).SRCS := \
-    $(ALL_MODULES.$(my_register_name).SRCS) $(LOCAL_SRC_FILES)
 test_config :=
 
 INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
diff --git a/core/binary.mk b/core/binary.mk
index b8ee423..07fb48a 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -46,8 +46,8 @@
 my_cflags := $(LOCAL_CFLAGS)
 my_conlyflags := $(LOCAL_CONLYFLAGS)
 my_cppflags := $(LOCAL_CPPFLAGS)
-my_cflags_no_override := $(GLOBAL_CFLAGS_NO_OVERRIDE)
-my_cppflags_no_override := $(GLOBAL_CPPFLAGS_NO_OVERRIDE)
+my_cflags_no_override := $(GLOBAL_CLANG_CFLAGS_NO_OVERRIDE)
+my_cppflags_no_override := $(GLOBAL_CLANG_CPPFLAGS_NO_OVERRIDE)
 my_ldflags := $(LOCAL_LDFLAGS)
 my_ldlibs := $(LOCAL_LDLIBS)
 my_asflags := $(LOCAL_ASFLAGS)
@@ -626,8 +626,6 @@
 # actually used (although they are usually empty).
 arm_objects_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)$(arm_objects_mode)_CFLAGS)
 normal_objects_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)$(normal_objects_mode)_CFLAGS)
-arm_objects_cflags := $(call convert-to-clang-flags,$(arm_objects_cflags))
-normal_objects_cflags := $(call convert-to-clang-flags,$(normal_objects_cflags))
 
 else
 arm_objects_mode :=
@@ -1561,8 +1559,6 @@
 my_cflags += $(LOCAL_CLANG_CFLAGS)
 my_conlyflags += $(LOCAL_CLANG_CONLYFLAGS)
 my_cppflags += $(LOCAL_CLANG_CPPFLAGS)
-my_cflags_no_override += $(GLOBAL_CLANG_CFLAGS_NO_OVERRIDE)
-my_cppflags_no_override += $(GLOBAL_CLANG_CPPFLAGS_NO_OVERRIDE)
 my_asflags += $(LOCAL_CLANG_ASFLAGS)
 my_ldflags += $(LOCAL_CLANG_LDFLAGS)
 my_cflags += $(LOCAL_CLANG_CFLAGS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_CLANG_CFLAGS_$(my_32_64_bit_suffix))
diff --git a/core/config.mk b/core/config.mk
index 483bc77..b9174b3 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -99,6 +99,15 @@
   TARGET_CLANG_SUPPORTED 2ND_TARGET_CLANG_SUPPORTED \
   TARGET_CC 2ND_TARGET_CC \
   TARGET_CXX 2ND_TARGET_CXX \
+  TARGET_TOOLCHAIN_ROOT 2ND_TARGET_TOOLCHAIN_ROOT \
+  HOST_TOOLCHAIN_ROOT 2ND_HOST_TOOLCHAIN_ROOT \
+  HOST_CROSS_TOOLCHAIN_ROOT 2ND_HOST_CROSS_TOOLCHAIN_ROOT \
+  HOST_TOOLS_PREFIX 2ND_HOST_TOOLS_PREFIX \
+  HOST_CROSS_TOOLS_PREFIX 2ND_HOST_CROSS_TOOLS_PREFIX \
+  HOST_GCC_VERSION 2ND_HOST_GCC_VERSION \
+  HOST_CROSS_GCC_VERSION 2ND_HOST_CROSS_GCC_VERSION \
+  TARGET_NDK_GCC_VERSION 2ND_TARGET_NDK_GCC_VERSION \
+  GLOBAL_CFLAGS_NO_OVERRIDE GLOBAL_CPPFLAGS_NO_OVERRIDE \
   ,GCC support has been removed. Use Clang instead)
 
 # This is marked as obsolete in envsetup.mk after reading the BoardConfig.mk
@@ -705,7 +714,6 @@
 DATA_BINDING_COMPILER := $(HOST_OUT_JAVA_LIBRARIES)/databinding-compiler.jar
 FAT16COPY := build/make/tools/fat16copy.py
 CHECK_LINK_TYPE := build/make/tools/check_link_type.py
-UUIDGEN := build/make/tools/uuidgen.py
 LPMAKE := $(HOST_OUT_EXECUTABLES)/lpmake$(HOST_EXECUTABLE_SUFFIX)
 
 PROGUARD := external/proguard/bin/proguard.sh
@@ -1000,16 +1008,42 @@
 endif # PRODUCT_USE_DYNAMIC_PARTITION_SIZE
 
 ifeq ($(PRODUCT_BUILD_SUPER_PARTITION),true)
-ifdef BOARD_SUPER_PARTITION_PARTITION_LIST
-# BOARD_SUPER_PARTITION_PARTITION_LIST: a list of the following tokens
+
+# BOARD_SUPER_PARTITION_GROUPS defines a list of "updatable groups". Each updatable group is a
+# group of partitions that share the same pool of free spaces.
+# For each group in BOARD_SUPER_PARTITION_GROUPS, a BOARD_{GROUP}_SIZE and
+# BOARD_{GROUP}_PARTITION_PARTITION_LIST may be defined.
+#     - BOARD_{GROUP}_SIZE: The maximum sum of sizes of all partitions in the group.
+#       If empty, no limit is enforced on the sum of sizes for this group.
+#     - BOARD_{GROUP}_PARTITION_PARTITION_LIST: the list of partitions that belongs to this group.
+#       If empty, no partitions belong to this group, and the sum of sizes is effectively 0.
+$(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
+    $(eval BOARD_$(group)_SIZE ?=) \
+    $(eval .KATI_READONLY := BOARD_$(group)_SIZE) \
+    $(eval BOARD_$(group)_PARTITION_LIST ?=) \
+    $(eval .KATI_READONLY := BOARD_$(group)_PARTITION_LIST) \
+)
+
+# BOARD_*_PARTITION_LIST: a list of the following tokens
 valid_super_partition_list := system vendor product product_services
-ifneq (,$(filter-out $(valid_super_partition_list),$(BOARD_SUPER_PARTITION_PARTITION_LIST)))
-$(error BOARD_SUPER_PARTITION_PARTITION_LIST contains invalid partition name \
-		($(filter-out $(valid_super_partition_list),$(BOARD_SUPER_PARTITION_PARTITION_LIST))). \
-        Valid names are $(valid_super_partition_list))
-endif
+$(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
+    $(if $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)), \
+        $(error BOARD_$(group)_PARTITION_LIST contains invalid partition name \
+            $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)). \
+            Valid names are $(valid_super_partition_list))))
 valid_super_partition_list :=
-endif # BOARD_SUPER_PARTITION_PARTITION_LIST
+
+
+# Define BOARD_SUPER_PARTITION_PARTITION_LIST, the sum of all BOARD_*_PARTITION_LIST
+ifdef BOARD_SUPER_PARTITION_PARTITION_LIST
+$(error BOARD_SUPER_PARTITION_PARTITION_LIST should not be defined, but computed from \
+    BOARD_SUPER_PARTITION_GROUPS and BOARD_*_PARTITION_LIST)
+endif
+BOARD_SUPER_PARTITION_PARTITION_LIST := \
+    $(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
+        $(BOARD_$(group)_PARTITION_LIST))
+.KATI_READONLY := BOARD_SUPER_PARTITION_PARTITION_LIST
+
 endif # PRODUCT_BUILD_SUPER_PARTITION
 
 # ###############################################################
diff --git a/core/java.mk b/core/java.mk
index 6ca2904..c015e4a 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -170,6 +170,7 @@
                 $(filter %.java,$(LOCAL_GENERATED_SOURCES))
 java_intermediate_sources := $(addprefix $(TARGET_OUT_COMMON_INTERMEDIATES)/, $(filter %.java,$(LOCAL_INTERMEDIATE_SOURCES)))
 all_java_sources := $(java_sources) $(java_intermediate_sources)
+ALL_MODULES.$(my_register_name).SRCS := $(ALL_MODULES.$(my_register_name).SRCS) $(all_java_sources)
 
 include $(BUILD_SYSTEM)/java_common.mk
 
diff --git a/core/product.mk b/core/product.mk
index 8c8246e..d1c74e7 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -408,7 +408,7 @@
 	BOARD_PRODUCTIMAGE_PARTITION_RESERVED_SIZE \
 	BOARD_PRODUCT_SERVICESIMAGE_PARTITION_RESERVED_SIZE \
 	BOARD_SUPER_PARTITION_SIZE \
-	BOARD_SUPER_PARTITION_PARTITION_LIST \
+	BOARD_SUPER_PARTITION_GROUPS \
 
 #
 # Mark the variables in _product_stash_var_list as readonly
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index b282048..11f5fe4 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -90,6 +90,7 @@
     init.rc \
     input \
     installd \
+    iorapd \
     ip \
     ip6tables \
     iptables \
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index d7d1bc8..2fa5f52 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -49,7 +49,6 @@
 import os
 import shlex
 import shutil
-import subprocess
 import sys
 import uuid
 import zipfile
@@ -259,10 +258,11 @@
     args = OPTIONS.info_dict.get("avb_dtbo_add_hash_footer_args")
     if args and args.strip():
       cmd.extend(shlex.split(args))
-    p = common.Run(cmd, stdout=subprocess.PIPE)
-    p.communicate()
-    assert p.returncode == 0, \
-        "avbtool add_hash_footer of %s failed" % (img.name,)
+    proc = common.Run(cmd)
+    output, _ = proc.communicate()
+    assert proc.returncode == 0, \
+        "Failed to call 'avbtool add_hash_footer' for {}:\n{}".format(
+            img.name, output)
 
   img.Write()
   return img.name
@@ -451,9 +451,9 @@
         assert found, 'Failed to find {}'.format(image_path)
     cmd.extend(split_args)
 
-  p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-  stdoutdata, _ = p.communicate()
-  assert p.returncode == 0, \
+  proc = common.Run(cmd)
+  stdoutdata, _ = proc.communicate()
+  assert proc.returncode == 0, \
       "avbtool make_vbmeta_image failed:\n{}".format(stdoutdata)
   img.Write()
 
@@ -481,9 +481,9 @@
   if args:
     cmd.extend(shlex.split(args))
 
-  p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-  stdoutdata, _ = p.communicate()
-  assert p.returncode == 0, \
+  proc = common.Run(cmd)
+  stdoutdata, _ = proc.communicate()
+  assert proc.returncode == 0, \
       "bpttool make_table failed:\n{}".format(stdoutdata)
 
   img.Write()
@@ -600,12 +600,10 @@
 
   temp_care_map = common.MakeTempFile(prefix="caremap-", suffix=".pb")
   care_map_gen_cmd = ["care_map_generator", temp_care_map_text, temp_care_map]
-  p = common.Run(care_map_gen_cmd, stdout=subprocess.PIPE,
-                 stderr=subprocess.STDOUT)
-  output, _ = p.communicate()
-  if OPTIONS.verbose:
-    print(output.rstrip())
-  assert p.returncode == 0, "Failed to generate the care_map proto message."
+  proc = common.Run(care_map_gen_cmd)
+  output, _ = proc.communicate()
+  assert proc.returncode == 0, \
+      "Failed to generate the care_map proto message:\n{}".format(output)
 
   care_map_path = "META/care_map.pb"
   if output_zip and care_map_path not in output_zip.namelist():
@@ -656,9 +654,9 @@
   cmd += shlex.split(OPTIONS.info_dict.get('lpmake_args').strip())
   cmd += ['--output', img.name]
 
-  p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-  stdoutdata, _ = p.communicate()
-  assert p.returncode == 0, \
+  proc = common.Run(cmd)
+  stdoutdata, _ = proc.communicate()
+  assert proc.returncode == 0, \
       "lpmake tool failed:\n{}".format(stdoutdata)
 
   img.Write()
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index aeb4379..189dba2 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -23,7 +23,6 @@
 import os
 import os.path
 import re
-import subprocess
 import sys
 import threading
 from collections import deque, OrderedDict
@@ -43,11 +42,10 @@
 
   # Don't dump the bsdiff/imgdiff commands, which are not useful for the case
   # here, since they contain temp filenames only.
-  p = common.Run(cmd, verbose=False, stdout=subprocess.PIPE,
-                 stderr=subprocess.STDOUT)
-  output, _ = p.communicate()
+  proc = common.Run(cmd, verbose=False)
+  output, _ = proc.communicate()
 
-  if p.returncode != 0:
+  if proc.returncode != 0:
     raise ValueError(output)
 
   with open(patchfile, 'rb') as f:
@@ -1494,9 +1492,9 @@
                "--block-limit={}".format(max_blocks_per_transfer),
                "--split-info=" + patch_info_file,
                src_file, tgt_file, patch_file]
-        p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-        imgdiff_output, _ = p.communicate()
-        assert p.returncode == 0, \
+        proc = common.Run(cmd)
+        imgdiff_output, _ = proc.communicate()
+        assert proc.returncode == 0, \
             "Failed to create imgdiff patch between {} and {}:\n{}".format(
                 src_name, tgt_name, imgdiff_output)
 
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 3cac90a..a580709 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -24,7 +24,6 @@
 import re
 import subprocess
 import sys
-import tempfile
 import zipfile
 
 from hashlib import sha1
@@ -165,11 +164,11 @@
   cmd = ['delta_generator',
          '--in_file=' + payload_file,
          '--public_key=' + pubkey]
-  proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  proc = common.Run(cmd)
   stdoutdata, _ = proc.communicate()
   assert proc.returncode == 0, \
-      'Failed to verify payload with delta_generator: %s\n%s' % (package,
-                                                                 stdoutdata)
+      'Failed to verify payload with delta_generator: {}\n{}'.format(
+          package, stdoutdata)
   common.ZipClose(package_zip)
 
   # Verified successfully upon reaching here.
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 4e2346c..e381676 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -121,15 +121,26 @@
 
 
 def Run(args, verbose=None, **kwargs):
-  """Create and return a subprocess.Popen object.
+  """Creates and returns a subprocess.Popen object.
 
-  Caller can specify if the command line should be printed. The global
-  OPTIONS.verbose will be used if not specified.
+  Args:
+    args: The command represented as a list of strings.
+    verbose: Whether the commands should be shown (default to OPTIONS.verbose
+        if unspecified).
+    kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
+        stdin, etc. stdout and stderr will default to subprocess.PIPE and
+        subprocess.STDOUT respectively unless caller specifies any of them.
+
+  Returns:
+    A subprocess.Popen object.
   """
   if verbose is None:
     verbose = OPTIONS.verbose
+  if 'stdout' not in kwargs and 'stderr' not in kwargs:
+    kwargs['stdout'] = subprocess.PIPE
+    kwargs['stderr'] = subprocess.STDOUT
   if verbose:
-    print("  running: ", " ".join(args))
+    print("  Running: \"{}\"".format(" ".join(args)))
   return subprocess.Popen(args, **kwargs)
 
 
@@ -443,8 +454,7 @@
   avbtool = os.getenv('AVBTOOL') or info_dict["avb_avbtool"]
   pubkey_path = MakeTempFile(prefix="avb-", suffix=".pubkey")
   proc = Run(
-      [avbtool, "extract_public_key", "--key", key, "--output", pubkey_path],
-      stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+      [avbtool, "extract_public_key", "--key", key, "--output", pubkey_path])
   stdoutdata, _ = proc.communicate()
   assert proc.returncode == 0, \
       "Failed to extract pubkey for {}:\n{}".format(
@@ -551,9 +561,10 @@
     fn = os.path.join(sourcedir, "recovery_dtbo")
     cmd.extend(["--recovery_dtbo", fn])
 
-  p = Run(cmd, stdout=subprocess.PIPE)
-  p.communicate()
-  assert p.returncode == 0, "mkbootimg of %s image failed" % (partition_name,)
+  proc = Run(cmd)
+  output, _ = proc.communicate()
+  assert proc.returncode == 0, \
+      "Failed to run mkbootimg of {}:\n{}".format(partition_name, output)
 
   if (info_dict.get("boot_signer") == "true" and
       info_dict.get("verity_key")):
@@ -568,9 +579,10 @@
     cmd.extend([path, img.name,
                 info_dict["verity_key"] + ".pk8",
                 info_dict["verity_key"] + ".x509.pem", img.name])
-    p = Run(cmd, stdout=subprocess.PIPE)
-    p.communicate()
-    assert p.returncode == 0, "boot_signer of %s image failed" % path
+    proc = Run(cmd)
+    output, _ = proc.communicate()
+    assert proc.returncode == 0, \
+        "Failed to run boot_signer of {} image:\n{}".format(path, output)
 
   # Sign the image if vboot is non-empty.
   elif info_dict.get("vboot"):
@@ -588,9 +600,10 @@
            info_dict["vboot_subkey"] + ".vbprivk",
            img_keyblock.name,
            img.name]
-    p = Run(cmd, stdout=subprocess.PIPE)
-    p.communicate()
-    assert p.returncode == 0, "vboot_signer of %s image failed" % path
+    proc = Run(cmd)
+    output, _ = proc.communicate()
+    assert proc.returncode == 0, \
+        "Failed to run vboot_signer of {} image:\n{}".format(path, output)
 
     # Clean up the temp files.
     img_unsigned.close()
@@ -607,10 +620,11 @@
     args = info_dict.get("avb_" + partition_name + "_add_hash_footer_args")
     if args and args.strip():
       cmd.extend(shlex.split(args))
-    p = Run(cmd, stdout=subprocess.PIPE)
-    p.communicate()
-    assert p.returncode == 0, "avbtool add_hash_footer of %s failed" % (
-        partition_name,)
+    proc = Run(cmd)
+    output, _ = proc.communicate()
+    assert proc.returncode == 0, \
+        "Failed to run 'avbtool add_hash_footer' of {}:\n{}".format(
+            partition_name, output)
 
   img.seek(os.SEEK_SET, 0)
   data = img.read()
@@ -682,9 +696,9 @@
     cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
     if pattern is not None:
       cmd.extend(pattern)
-    p = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    stdoutdata, _ = p.communicate()
-    if p.returncode != 0:
+    proc = Run(cmd)
+    stdoutdata, _ = proc.communicate()
+    if proc.returncode != 0:
       raise ExternalError(
           "Failed to unzip input target-files \"{}\":\n{}".format(
               filename, stdoutdata))
@@ -926,15 +940,14 @@
               key + OPTIONS.private_key_suffix,
               input_name, output_name])
 
-  p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE,
-          stderr=subprocess.STDOUT)
+  proc = Run(cmd, stdin=subprocess.PIPE)
   if password is not None:
     password += "\n"
-  stdoutdata, _ = p.communicate(password)
-  if p.returncode != 0:
+  stdoutdata, _ = proc.communicate(password)
+  if proc.returncode != 0:
     raise ExternalError(
         "Failed to run signapk.jar: return code {}:\n{}".format(
-            p.returncode, stdoutdata))
+            proc.returncode, stdoutdata))
 
 
 def CheckSize(data, target, info_dict):
@@ -1267,8 +1280,7 @@
         first_line = i + 4
     f.close()
 
-    p = Run([self.editor, "+%d" % (first_line,), self.pwfile])
-    _, _ = p.communicate()
+    Run([self.editor, "+%d" % (first_line,), self.pwfile], stdout=None).communicate()
 
     return self.ReadFile()
 
@@ -1396,10 +1408,10 @@
   if isinstance(entries, basestring):
     entries = [entries]
   cmd = ["zip", "-d", zip_filename] + entries
-  proc = Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+  proc = Run(cmd)
   stdoutdata, _ = proc.communicate()
-  assert proc.returncode == 0, "Failed to delete %s:\n%s" % (entries,
-                                                             stdoutdata)
+  assert proc.returncode == 0, \
+      "Failed to delete {}:\n{}".format(entries, stdoutdata)
 
 
 def ZipClose(zip_file):
@@ -1860,9 +1872,9 @@
                     '--output={}.new.dat.br'.format(self.path),
                     '{}.new.dat'.format(self.path)]
       print("Compressing {}.new.dat with brotli".format(self.partition))
-      p = Run(brotli_cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-      stdoutdata, _ = p.communicate()
-      assert p.returncode == 0, \
+      proc = Run(brotli_cmd)
+      stdoutdata, _ = proc.communicate()
+      assert proc.returncode == 0, \
           'Failed to compress {}.new.dat with brotli:\n{}'.format(
               self.partition, stdoutdata)
 
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 755eda9..7ea53f8 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -394,8 +394,7 @@
       signing_key = common.MakeTempFile(prefix="key-", suffix=".key")
       cmd.extend(["-out", signing_key])
 
-      get_signing_key = common.Run(cmd, verbose=False, stdout=subprocess.PIPE,
-                                   stderr=subprocess.STDOUT)
+      get_signing_key = common.Run(cmd, verbose=False)
       stdoutdata, _ = get_signing_key.communicate()
       assert get_signing_key.returncode == 0, \
           "Failed to get signing key: {}".format(stdoutdata)
@@ -411,7 +410,7 @@
     """Signs the given input file. Returns the output filename."""
     out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
     cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
-    signing = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    signing = common.Run(cmd)
     stdoutdata, _ = signing.communicate()
     assert signing.returncode == 0, \
         "Failed to sign the input file: {}".format(stdoutdata)
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
index a73746e..cc7b887 100644
--- a/tools/releasetools/test_add_img_to_target_files.py
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -16,7 +16,6 @@
 
 import os
 import os.path
-import subprocess
 import unittest
 import zipfile
 
@@ -45,9 +44,11 @@
 
     # Calls an external binary to convert the proto message.
     cmd = ["care_map_generator", "--parse_proto", file_name, text_file]
-    p = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
-    p.communicate()
-    self.assertEqual(0, p.returncode)
+    proc = common.Run(cmd)
+    output, _ = proc.communicate()
+    self.assertEqual(
+        0, proc.returncode,
+        "Failed to run care_map_generator:\n{}".format(output))
 
     with open(text_file, 'r') as verify_fp:
       plain_text = verify_fp.read()
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 1d8a786..29e0d83 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -17,7 +17,6 @@
 import copy
 import os
 import os.path
-import subprocess
 import unittest
 import zipfile
 
@@ -1024,11 +1023,11 @@
            '--signature_size', str(self.SIGNATURE_SIZE),
            '--metadata_hash_file', metadata_sig_file,
            '--payload_hash_file', payload_sig_file]
-    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    proc = common.Run(cmd)
     stdoutdata, _ = proc.communicate()
     self.assertEqual(
         0, proc.returncode,
-        'Failed to run brillo_update_payload: {}'.format(stdoutdata))
+        'Failed to run brillo_update_payload:\n{}'.format(stdoutdata))
 
     signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
 
diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py
index 0aaf069..ecb7fde 100644
--- a/tools/releasetools/test_validate_target_files.py
+++ b/tools/releasetools/test_validate_target_files.py
@@ -21,7 +21,6 @@
 import os
 import os.path
 import shutil
-import subprocess
 import unittest
 
 import build_image
@@ -44,7 +43,7 @@
       kernel_fp.write(os.urandom(10))
 
     cmd = ['mkbootimg', '--kernel', kernel, '-o', output_file]
-    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    proc = common.Run(cmd)
     stdoutdata, _ = proc.communicate()
     self.assertEqual(
         0, proc.returncode,
@@ -53,7 +52,7 @@
     cmd = ['boot_signer', '/boot', output_file,
            os.path.join(self.testdata_dir, 'testkey.pk8'),
            os.path.join(self.testdata_dir, 'testkey.x509.pem'), output_file]
-    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    proc = common.Run(cmd)
     stdoutdata, _ = proc.communicate()
     self.assertEqual(
         0, proc.returncode,
@@ -123,7 +122,7 @@
     system_root = common.MakeTempDir()
     cmd = ['mkuserimg_mke2fs', '-s', system_root, output_file, 'ext4',
            '/system', str(image_size), '-j', '0']
-    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    proc = common.Run(cmd)
     stdoutdata, _ = proc.communicate()
     self.assertEqual(
         0, proc.returncode,
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 09f800f..1cc4a60 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -35,7 +35,6 @@
 import logging
 import os.path
 import re
-import subprocess
 import zipfile
 
 import common
@@ -256,7 +255,7 @@
         continue
 
       cmd = ['boot_signer', '-verify', image_path, '-certificate', verity_key]
-      proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+      proc = common.Run(cmd)
       stdoutdata, _ = proc.communicate()
       assert proc.returncode == 0, \
           'Failed to verify {} with boot_signer:\n{}'.format(image, stdoutdata)
@@ -299,7 +298,7 @@
         continue
 
       cmd = ['verity_verifier', image_path, '-mincrypt', verity_key_mincrypt]
-      proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+      proc = common.Run(cmd)
       stdoutdata, _ = proc.communicate()
       assert proc.returncode == 0, \
           'Failed to verify {} with verity_verifier (key: {}):\n{}'.format(
@@ -328,7 +327,7 @@
             partition, info_dict, options[key_name])
         cmd.extend(["--expected_chain_partition", chained_partition_arg])
 
-    proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    proc = common.Run(cmd)
     stdoutdata, _ = proc.communicate()
     assert proc.returncode == 0, \
         'Failed to verify {} with verity_verifier (key: {}):\n{}'.format(
diff --git a/tools/uuidgen.py b/tools/uuidgen.py
deleted file mode 100755
index d3091a7..0000000
--- a/tools/uuidgen.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright 2018 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from __future__ import print_function
-import sys
-import uuid
-
-def uuidgen(name):
-    return uuid.uuid5(uuid.uuid5(uuid.NAMESPACE_URL, "android.com"), name)
-
-if __name__ == "__main__":
-    if len(sys.argv) < 2:
-        print("Usage: uuidgen.py <name>")
-        sys.exit(1)
-    name = sys.argv[1]
-    print(uuidgen(name))