Merge "Set LOCAL_SDK_VERSION := current for RROs"
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 39441e1..7d42fc9 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -515,6 +515,8 @@
 # Clean up old ninja files
 $(call add-clean-step, rm -f $(OUT_DIR)/build-*-dist*.ninja)
 
+$(call add-clean-step, rm -f $(HOST_OUT)/*ts/host-libprotobuf-java-*.jar)
+
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
 # ************************************************
diff --git a/core/Makefile b/core/Makefile
index 6721757..2b602b8 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -407,7 +407,6 @@
 			TARGET_CPU_ABI_LIST_64_BIT="$(TARGET_CPU_ABI_LIST_64_BIT)" \
 			TARGET_CPU_ABI="$(TARGET_CPU_ABI)" \
 			TARGET_CPU_ABI2="$(TARGET_CPU_ABI2)" \
-			TARGET_AAPT_CHARACTERISTICS="$(TARGET_AAPT_CHARACTERISTICS)" \
 	        bash $(BUILDINFO_SH) >> $@
 	$(hide) $(foreach file,$(system_prop_file), \
 		if [ -f "$(file)" ]; then \
@@ -496,7 +495,8 @@
 endif  # BOARD_USES_PRODUCTIMAGE
 	$(hide) echo "#" >> $@; \
 	        echo "# ADDITIONAL PRODUCT PROPERTIES" >> $@; \
-	        echo "#" >> $@;
+	        echo "#" >> $@; \
+          echo "ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS)" >> $@;
 	$(hide) $(foreach line,$(FINAL_PRODUCT_PROPERTIES), \
 		echo "$(line)" >> $@;)
 	$(hide) build/make/tools/post_process_props.py $@
@@ -1784,12 +1784,12 @@
     $(filter-out $(foreach p,$(overridden_packages),$(p) %/$(p).apk), \
         $(ALL_PDK_FUSION_FILES))
 
-INTERNAL_SYSTEMIMAGE_FILES := $(filter $(TARGET_OUT)/%, \
+INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \
     $(ALL_GENERATED_SOURCES) \
     $(ALL_DEFAULT_INSTALLED_MODULES) \
     $(PDK_FUSION_SYSIMG_FILES) \
     $(RECOVERY_RESOURCE_ZIP)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
+    $(PDK_FUSION_SYMLINK_STAMP))
 
 FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
 
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 57fd818..9c5c69d 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -508,8 +508,9 @@
       $(eval _src_base := $(call word-colon,1,$(td))), \
       $(eval _src_base := $(LOCAL_PATH)) \
         $(eval _file := $(call word-colon,1,$(td)))) \
-    $(if $(findstring ..,$(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include '..': $(_file))) \
-    $(if $(filter /%,$(_src_base) $(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include absolute paths: $(_src_base) $(_file))) \
+    $(if $(call streq,$(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK)),, \
+      $(if $(findstring ..,$(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include '..': $(_file))) \
+      $(if $(filter /%,$(_src_base) $(_file)),$(error $(LOCAL_MODULE_MAKEFILE): LOCAL_TEST_DATA may not include absolute paths: $(_src_base) $(_file)))) \
     $(eval my_test_data_file_pairs := $(my_test_data_file_pairs) $(call append-path,$(_src_base),$(_file)):$(_file)) \
     $(call append-path,$(_src_base),$(_file)):$(call append-path,$(my_module_path),$(_file))))
 
diff --git a/core/clang/config.mk b/core/clang/config.mk
index 98e01da..63582c2 100644
--- a/core/clang/config.mk
+++ b/core/clang/config.mk
@@ -2,9 +2,6 @@
 
 LLVM_RTLIB_PATH := $(LLVM_PREBUILTS_PATH)/../lib64/clang/$(LLVM_RELEASE_VERSION)/lib/linux/
 
-CLANG_TBLGEN := $(BUILD_OUT_EXECUTABLES)/clang-tblgen$(BUILD_EXECUTABLE_SUFFIX)
-LLVM_TBLGEN := $(BUILD_OUT_EXECUTABLES)/llvm-tblgen$(BUILD_EXECUTABLE_SUFFIX)
-
 define convert-to-clang-flags
 $(strip $(filter-out $(CLANG_CONFIG_UNKNOWN_CFLAGS),$(1)))
 endef
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 7f8e9c3..34a1db8 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -254,6 +254,8 @@
 LOCAL_SDK_VERSION:=
 LOCAL_SHARED_ANDROID_LIBRARIES:=
 LOCAL_SHARED_LIBRARIES:=
+LOCAL_SOONG_AAR :=
+LOCAL_SOONG_BUNDLE :=
 LOCAL_SOONG_CLASSES_JAR :=
 LOCAL_SOONG_DEX_JAR :=
 LOCAL_SOONG_EXPORT_PROGUARD_FLAGS :=
diff --git a/core/config.mk b/core/config.mk
index 3289a89..c1ea5a8 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -204,6 +204,11 @@
 JAVA_TMPDIR_ARG :=
 endif
 
+# A list of the jars that provide information about usages of the hidden API.
+# The core-oj-hiddenapi jar provides information for the core-oj jar.
+HIDDENAPI_EXTRA_APP_USAGE_JARS := \
+    core-oj-hiddenapi \
+
 # Default to remove the org.apache.http.legacy from bootclasspath
 ifeq ($(REMOVE_OAHL_FROM_BCP),)
 REMOVE_OAHL_FROM_BCP := true
@@ -527,8 +532,8 @@
 ifdef PDK_FUSION_PLATFORM_ZIP
 TARGET_BUILD_PDK := true
 ifeq (,$(wildcard $(PDK_FUSION_PLATFORM_ZIP)))
-  ifneq (,$(wildcard $(dir $(PDK_FUSION_PLATFORM_ZIP))/pdk.mk))
-    PDK_FUSION_PLATFORM_DIR := $(dir $(PDK_FUSION_PLATFORM_ZIP))
+  ifneq (,$(wildcard $(patsubst %.zip,%,$(PDK_FUSION_PLATFORM_ZIP))/pdk.mk))
+    PDK_FUSION_PLATFORM_DIR := $(patsubst %.zip,%,$(PDK_FUSION_PLATFORM_ZIP))
     PDK_FUSION_PLATFORM_ZIP :=
   else
     $(error Cannot find file $(PDK_FUSION_PLATFORM_ZIP).)
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index be1b124..fcf527e 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -180,7 +180,7 @@
 
 ifneq ($(filter hwaddress,$(my_sanitize)),)
   my_shared_libraries += $($(LOCAL_2ND_ARCH_VAR_PREFIX)HWADDRESS_SANITIZER_RUNTIME_LIBRARY)
-  ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
+  ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
     ifeq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
       my_static_libraries := $(my_static_libraries) $($(LOCAL_2ND_ARCH_VAR_PREFIX)HWADDRESS_SANITIZER_STATIC_LIBRARY)
     endif
diff --git a/core/definitions.mk b/core/definitions.mk
index c9df700..0e959d6 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2672,9 +2672,10 @@
       $(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) $(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
 	@rm -rf $(dir $(2))
 	@mkdir -p $(dir $(2))
-	find $(dir $(1)) -maxdepth 1 -name "classes*.dex" | xargs -I{} cp -f {} $(dir $(2))/; \
-	find $(dir $(2)) -maxdepth 1 -name "classes*.dex" | sort | sed 's/^/--dex=/' \
-	| xargs $(HIDDENAPI) encode \
+	for INPUT_DEX in `find $(dir $(1)) -maxdepth 1 -name "classes*.dex" | sort`; do \
+	    echo "--input-dex=$$$${INPUT_DEX}"; \
+	    echo "--output-dex=$(dir $(2))/`basename $$$${INPUT_DEX}`"; \
+	done | xargs $(HIDDENAPI) encode \
 	    --light-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_LIGHT_GREYLIST) \
 	    --dark-greylist=$(INTERNAL_PLATFORM_HIDDENAPI_DARK_GREYLIST) \
 	    --blacklist=$(INTERNAL_PLATFORM_HIDDENAPI_BLACKLIST)
@@ -3006,14 +3007,17 @@
 # 1. Copy the files to the many suite output directories.
 #    And for test config files, we'll check the .xml is well-formed before copy.
 # 2. Add all the files to each suite's dependent files list.
-# 3. Do the dependency addition to my_all_targets
+# 3. Do the dependency addition to my_all_targets.
+# 4. Save the module name to COMPATIBILITY.$(suite).MODULES for each suite.
 # Requires for each suite: use my_compat_dist_config_$(suite) to define the test config.
 #    and use my_compat_dist_$(suite) to define the others.
 define create-suite-dependencies
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
   $(eval COMPATIBILITY.$(suite).FILES := \
     $$(COMPATIBILITY.$(suite).FILES) $$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))) \
-      $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f))))) \
+      $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f)))) \
+  $(eval COMPATIBILITY.$(suite).MODULES := \
+    $$(COMPATIBILITY.$(suite).MODULES) $$(my_register_name))) \
 $(eval $(my_all_targets) : $(call copy-many-files, \
   $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_$(suite))))) \
   $(call copy-many-xml-files-checked, \
diff --git a/core/host_java_library_common.mk b/core/host_java_library_common.mk
index 8df4b37..0e62f60 100644
--- a/core/host_java_library_common.mk
+++ b/core/host_java_library_common.mk
@@ -32,15 +32,15 @@
 proto_sources := $(filter %.proto,$(LOCAL_SRC_FILES))
 ifneq ($(proto_sources),)
 ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),micro)
-    LOCAL_JAVA_LIBRARIES += host-libprotobuf-java-micro
+    LOCAL_JAVA_LIBRARIES += libprotobuf-java-micro
 else
   ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),nano)
-    LOCAL_JAVA_LIBRARIES += host-libprotobuf-java-nano
+    LOCAL_JAVA_LIBRARIES += libprotobuf-java-nano
   else
     ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),full)
-      LOCAL_JAVA_LIBRARIES += host-libprotobuf-java-full
+      LOCAL_JAVA_LIBRARIES += libprotobuf-java-full
     else
-      LOCAL_JAVA_LIBRARIES += host-libprotobuf-java-lite
+      LOCAL_JAVA_LIBRARIES += libprotobuf-java-lite
     endif
   endif
 endif
diff --git a/core/main.mk b/core/main.mk
index 6374be9..fdf14de 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -221,13 +221,11 @@
 # Enable dynamic linker and hidden API developer warnings for
 # userdebug, eng and non-REL builds
 ifneq ($(TARGET_BUILD_VARIANT),user)
-  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1 \
-                                 ro.art.hiddenapi.warning=1
+  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
 else
 # Enable it for user builds as long as they are not final.
 ifneq ($(PLATFORM_VERSION_CODENAME),REL)
-  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1 \
-                                 ro.art.hiddenapi.warning=1
+  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
 endif
 endif
 
@@ -727,11 +725,44 @@
   $(eval r := $(filter $($(root))/%,$(call module-installed-files,\
     $(deps))))\
   $(if $(filter $(1),HOST_),\
+    $(eval ALL_MODULES.$(mod).HOST_SHARED_LIBRARY_FILES := $$(ALL_MODULES.$(mod).HOST_SHARED_LIBRARY_FILES) $(word 2,$(p)) $(r))\
+    $(eval ALL_MODULES.$(mod).HOST_SHARED_LIBRARIES := $$(ALL_MODULES.$(mod).HOST_SHARED_LIBRARIES) $(deps))\
     $(eval $(call add-required-host-so-deps,$(word 2,$(p)),$(r))),\
     $(eval $(call add-required-deps,$(word 2,$(p)),$(r))))\
   $(eval ALL_MODULES.$(mod).REQUIRED += $(deps)))
 endef
 
+# Recursively resolve the host shared library dependencies of a given module.
+# $(1): module name
+# Returns all transitive shared library dependencies of the module.
+define get-all-shared-libs-deps
+$(if $(_all_deps_for_$(1)_set_),$(_all_deps_for_$(1)_),\
+  $(eval _all_deps_for_$(1)_ :=) \
+  $(foreach dep,$(ALL_MODULES.$(1).HOST_SHARED_LIBRARIES),\
+    $(foreach m,$(call get-all-shared-libs-deps,$(dep)),\
+      $(eval _all_deps_for_$(1)_ := $$(_all_deps_for_$(1)_) $(m))\
+      $(eval _all_deps_for_$(1)_ := $(sort $(_all_deps_for_$(1)_))))\
+    $(eval _all_deps_for_$(1)_ := $$(_all_deps_for_$(1)_) $(dep))\
+    $(eval _all_deps_for_$(1)_ := $(sort $(_all_deps_for_$(1)_) $(dep)))\
+    $(eval _all_deps_for_$(1)_set_ := true))\
+$(_all_deps_for_$(1)_))
+endef
+
+# Scan all modules in the general-tests and device-tests suites and flatten
+# their shared library dependencies.
+define update-host-shared-libs-deps-for-suites
+$(foreach suite,general-tests device-tests,\
+  $(foreach m,$(COMPATIBILITY.$(suite).MODULES),\
+    $(eval my_deps := $(call get-all-shared-libs-deps,$(m)))\
+    $(foreach dep,$(my_deps),\
+      $(foreach f,$(ALL_MODULES.$(dep).HOST_SHARED_LIBRARY_FILES),\
+        $(eval target := $(HOST_OUT_TESTCASES)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
+        $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
+          $$(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES) $(f):$(target))\
+        $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
+          $(sort $(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES)))))))
+endef
+
 $(call resolve-shared-libs-depes,TARGET_)
 ifdef TARGET_2ND_ARCH
 $(call resolve-shared-libs-depes,TARGET_,true)
@@ -740,6 +771,9 @@
 ifdef HOST_2ND_ARCH
 $(call resolve-shared-libs-depes,HOST_,true)
 endif
+# Update host-side shared library dependencies for tests in the device-tests and general-tests suites.
+# This should be called after resolve-shared-libs-depes has been called for HOST_2ND_ARCH.
+$(call update-host-shared-libs-deps-for-suites)
 ifdef HOST_CROSS_OS
 $(call resolve-shared-libs-depes,HOST_CROSS_,,true)
 endif
@@ -1303,6 +1337,12 @@
   # Dist the installed files if they exist.
   apps_only_installed_files := $(foreach m,$(unbundled_build_modules),$(ALL_MODULES.$(m).INSTALLED))
   $(call dist-for-goals,apps_only, $(apps_only_installed_files))
+
+  # Dist the bundle files if they exist.
+  apps_only_bundle_files := $(foreach m,$(unbundled_build_modules),\
+    $(if $(ALL_MODULES.$(m).BUNDLE),$(ALL_MODULES.$(m).BUNDLE):$(m)-base.zip))
+  $(call dist-for-goals,apps_only, $(apps_only_bundle_files))
+
   # For uninstallable modules such as static Java library, we have to dist the built file,
   # as <module_name>.<suffix>
   apps_only_dist_built_files := $(foreach m,$(unbundled_build_modules),$(if $(ALL_MODULES.$(m).INSTALLED),,\
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 9a52af7..3e68c2f 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -194,6 +194,11 @@
 my_res_package := $(intermediates)/package-res.apk
 LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
 
+ifeq ($(LOCAL_USE_AAPT2),true)
+  my_bundle_module := $(intermediates)/base.zip
+  LOCAL_INTERMEDIATE_TARGETS += $(my_bundle_module)
+endif
+
 # Always run aapt2, because we need to at least compile the AndroidManifest.xml.
 need_compile_res := true
 
@@ -674,6 +679,55 @@
 	$(compress-package)
 endif  # LOCAL_COMPRESSED_MODULE
 
+ifeq ($(LOCAL_USE_AAPT2),true)
+  my_package_res_pb := $(intermediates)/package-res.pb.apk
+  $(my_package_res_pb): $(my_res_package) $(AAPT2)
+	$(AAPT2) convert --output-format proto $< -o $@
+
+  $(my_bundle_module): $(my_package_res_pb)
+  $(my_bundle_module): PRIVATE_RES_PACKAGE := $(my_package_res_pb)
+
+  $(my_bundle_module): $(jni_shared_libraries)
+  $(my_bundle_module): PRIVATE_JNI_SHARED_LIBRARIES := $(jni_shared_libraries_with_abis)
+  $(my_bundle_module): PRIVATE_JNI_SHARED_LIBRARIES_ABI := $(jni_shared_libraries_abis)
+
+  ifneq ($(full_classes_jar),)
+    $(my_bundle_module): PRIVATE_DEX_FILE := $(built_dex)
+    # Use the jarjar-processed archive as the initial package file.
+    $(my_bundle_module): PRIVATE_SOURCE_ARCHIVE := $(full_classes_pre_proguard_jar)
+    $(my_bundle_module): $(built_dex)
+  else
+    $(my_bundle_module): PRIVATE_DEX_FILE :=
+    $(my_bundle_module): PRIVATE_SOURCE_ARCHIVE :=
+  endif # full_classes_jar
+
+  $(my_bundle_module): $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
+	@echo "target Bundle: $(PRIVATE_MODULE) ($@)"
+	rm -rf $@.parts
+	mkdir -p $@.parts
+	$(ZIP2ZIP) -i $(PRIVATE_RES_PACKAGE) -o $@.parts/apk.zip AndroidManifest.xml:manifest/AndroidManifest.xml resources.pb "res/**/*" "assets/**/*"
+        ifneq ($(jni_shared_libraries),)
+	  $(call create-jni-shared-libs-package,$@.parts/jni.zip)
+        endif
+        ifeq ($(full_classes_jar),)
+        # We don't build a jar, so we need to add the Java resources here.
+	  $(if $(PRIVATE_EXTRA_JAR_ARGS),\
+	    $(call create-java-resources-jar,$@.parts/res.zip) && \
+	    $(ZIP2ZIP) -i $@.parts/res.zip -o $@.parts/res.zip.tmp "**/*:root/" && \
+	    mv -f $@.parts/res.zip.tmp $@.parts/res.zip)
+        else  # full_classes_jar
+	  $(call create-dex-jar,$@.parts/dex.zip,$(PRIVATE_DEX_FILE))
+	  $(ZIP2ZIP) -i $@.parts/dex.zip -o $@.parts/dex.zip.tmp "classes*.dex:dex/"
+	  mv -f $@.parts/dex.zip.tmp $@.parts/dex.zip
+	  $(call extract-resources-jar,$@.parts/res.zip,$(PRIVATE_SOURCE_ARCHIVE))
+	  $(ZIP2ZIP) -i $@.parts/res.zip -o $@.parts/res.zip.tmp "**/*:root/"
+	  mv -f $@.parts/res.zip.tmp $@.parts/res.zip
+        endif  # full_classes_jar
+	$(MERGE_ZIPS) $@ $@.parts/*.zip
+	rm -rf $@.parts
+  ALL_MODULES.$(LOCAL_MODULE).BUNDLE := $(my_bundle_module)
+endif
+
 ###############################
 ## Build dpi-specific apks, if it's apps_only build.
 ifdef TARGET_BUILD_APPS
diff --git a/core/pdk_fusion_modules.mk b/core/pdk_fusion_modules.mk
index 9aabd0f..235acf9 100644
--- a/core/pdk_fusion_modules.mk
+++ b/core/pdk_fusion_modules.mk
@@ -37,12 +37,14 @@
 
 # The source prebuilts are extracted in the rule of _pdk_fusion_stamp.
 # Use a touch rule to establish the dependency.
+ifndef PDK_FUSION_PLATFORM_DIR
 $(3) $(11) : $(_pdk_fusion_stamp)
 	$(hide) if [ ! -f $$@ ]; then \
 	  echo 'Error: $$@ does not exist. Check your platform.zip.' 1>&2; \
 	  exit 1; \
 	fi
 	$(hide) touch $$@
+endif
 endef
 
 # We don't have a LOCAL_PATH for the auto-generated modules, so let it be the $(BUILD_SYSTEM).
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 31cb28e..837920f 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -102,6 +102,10 @@
 
 PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
 
+ifdef LOCAL_SOONG_BUNDLE
+  ALL_MODULES.$(LOCAL_MODULE).BUNDLE := $(LOCAL_SOONG_BUNDLE)
+endif
+
 ifndef LOCAL_IS_HOST_MODULE
 ifeq ($(LOCAL_SDK_VERSION),system_current)
 my_link_type := java:system
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
index f213563..ae67fb8 100644
--- a/core/soong_cc_prebuilt.mk
+++ b/core/soong_cc_prebuilt.mk
@@ -8,7 +8,6 @@
   $(call pretty-error,soong_cc_prebuilt.mk may only be used from Soong)
 endif
 
-skip_module :=
 ifdef LOCAL_IS_HOST_MODULE
   ifneq ($(HOST_OS),$(LOCAL_MODULE_HOST_OS))
     my_prefix := HOST_CROSS_
@@ -31,6 +30,7 @@
   $(call pretty-error,Unsupported LOCAL_MODULE_$(my_prefix)ARCH=$(LOCAL_MODULE_$(my_prefix)ARCH))
 endif
 
+skip_module :=
 ifeq ($(TARGET_TRANSLATE_2ND_ARCH),true)
   ifndef LOCAL_IS_HOST_MODULE
     ifdef LOCAL_2ND_ARCH_VAR_PREFIX
@@ -177,6 +177,22 @@
 
 $(LOCAL_BUILT_MODULE): $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
+# We don't care about installed static libraries, since the libraries have
+# already been linked into the module at that point. We do, however, care
+# about the NOTICE files for any static libraries that we use.
+# (see notice_files.mk)
+#
+# Filter out some NDK libraries that are not being exported.
+my_static_libraries := \
+    $(filter-out ndk_libc++_static ndk_libc++abi ndk_libandroid_support ndk_libunwind, \
+      $(LOCAL_STATIC_LIBRARIES))
+installed_static_library_notice_file_targets := \
+    $(foreach lib,$(my_static_libraries) $(LOCAL_WHOLE_STATIC_LIBRARIES), \
+      NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST,TARGET)-STATIC_LIBRARIES-$(lib))
+
+$(notice_target): | $(installed_static_library_notice_file_targets)
+$(LOCAL_INSTALLED_MODULE): | $(notice_target)
+
 endif # !skip_module
 
 skip_module :=
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 288a763..3e6b261 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -120,6 +120,15 @@
 
     java-dex : $(LOCAL_BUILT_MODULE)
   else  # LOCAL_UNINSTALLABLE_MODULE
+
+    ifneq ($(filter $(LOCAL_MODULE),$(HIDDENAPI_EXTRA_APP_USAGE_JARS)),)
+      # Derive greylist from classes.jar.
+      # We use full_classes_jar here, which is the post-proguard jar (on the basis that we also
+      # have a full_classes_pre_proguard_jar). This is consistent with the equivalent code in
+      # java.mk.
+      $(eval $(call hiddenapi-generate-greylist-txt,$(full_classes_jar),$(hiddenapi_whitelist_txt),$(hiddenapi_greylist_txt),$(hiddenapi_darkgreylist_txt),$(hiddenapi_greylist_metadata_csv)))
+    endif
+
     $(eval $(call copy-one-file,$(full_classes_jar),$(LOCAL_BUILT_MODULE)))
     $(eval $(call copy-one-file,$(LOCAL_SOONG_DEX_JAR),$(common_javalib.jar)))
     java-dex : $(common_javalib.jar)
@@ -128,6 +137,10 @@
   $(eval $(call copy-one-file,$(full_classes_jar),$(LOCAL_BUILT_MODULE)))
 endif  # LOCAL_SOONG_DEX_JAR
 
+ifdef LOCAL_SOONG_AAR
+  ALL_MODULES.$(LOCAL_MODULE).AAR := $(LOCAL_SOONG_AAR)
+endif
+
 javac-check : $(full_classes_jar)
 javac-check-$(LOCAL_MODULE) : $(full_classes_jar)
 .PHONY: javac-check-$(LOCAL_MODULE)
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index a2b626e..0028ce4 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -18,12 +18,16 @@
 device-tests-zip := $(PRODUCT_OUT)/device-tests.zip
 # Create an artifact to include a list of test config files in device-tests.
 device-tests-list-zip := $(PRODUCT_OUT)/device-tests_list.zip
+my_host_shared_lib_for_device_tests := $(call copy-many-files,$(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES))
 $(device-tests-zip) : .KATI_IMPLICIT_OUTPUTS := $(device-tests-list-zip)
 $(device-tests-zip) : PRIVATE_device_tests_list := $(PRODUCT_OUT)/device-tests_list
-
-$(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(SOONG_ZIP)
+$(device-tests-zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_device_tests)
+$(device-tests-zip) : $(COMPATIBILITY.device-tests.FILES) $(my_host_shared_lib_for_device_tests) $(SOONG_ZIP)
 	echo $(sort $(COMPATIBILITY.device-tests.FILES)) | tr " " "\n" > $@.list
 	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
+	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
+	  echo $$shared_lib >> $@-host.list; \
+	done
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
 	rm -f $(PRIVATE_device_tests_list)
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index 9619bbe..36ab101 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -22,17 +22,33 @@
 general_tests_zip := $(PRODUCT_OUT)/general-tests.zip
 # Create an artifact to include a list of test config files in general-tests.
 general_tests_list_zip := $(PRODUCT_OUT)/general-tests_list.zip
+
+# Filter out the entries shared between general-tests' and device-tests' HOST_SHARED_LIBRARY.FILES,
+# to avoid warnings about overriding commands.
+my_host_shared_lib_for_general_tests := \
+  $(foreach m,$(filter $(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES),\
+	   $(COMPATIBILITY.general-tests.HOST_SHARED_LIBRARY.FILES)),$(call word-colon,2,$(m)))
+my_general_tests_shared_lib_files := \
+  $(filter-out $(COMPATIBILITY.device-tests.HOST_SHARED_LIBRARY.FILES),\
+	 $(COMPATIBILITY.general-tests.HOST_SHARED_LIBRARY.FILES))
+
+my_host_shared_lib_for_general_tests += $(call copy-many-files,$(my_general_tests_shared_lib_files))
+
 $(general_tests_zip) : PRIVATE_general_tests_list_zip := $(general_tests_list_zip)
 $(general_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(general_tests_list_zip)
 $(general_tests_zip) : PRIVATE_TOOLS := $(general_tests_tools)
 $(general_tests_zip) : PRIVATE_INTERMEDIATES_DIR := $(intermediates_dir)
-$(general_tests_zip) : $(COMPATIBILITY.general-tests.FILES) $(general_tests_tools) $(SOONG_ZIP)
+$(general_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_general_tests)
+$(general_tests_zip) : $(COMPATIBILITY.general-tests.FILES) $(general_tests_tools) $(my_host_shared_lib_for_general_tests) $(SOONG_ZIP)
 	rm -rf $(PRIVATE_INTERMEDIATES_DIR)
 	rm -f $@ $(PRIVATE_general_tests_list_zip)
 	mkdir -p $(PRIVATE_INTERMEDIATES_DIR) $(PRIVATE_INTERMEDIATES_DIR)/tools
 	echo $(sort $(COMPATIBILITY.general-tests.FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
 	grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true
 	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
+	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
+	  echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/host.list; \
+	done
 	cp -fp $(PRIVATE_TOOLS) $(PRIVATE_INTERMEDIATES_DIR)/tools/
 	$(SOONG_ZIP) -d -o $@ \
 	  -P host -C $(PRIVATE_INTERMEDIATES_DIR) -D $(PRIVATE_INTERMEDIATES_DIR)/tools \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index a3c9ac7..7b70c86 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -68,6 +68,7 @@
     ExtServices \
     ExtShared \
     fastboot \
+    flags_health_check \
     framework \
     framework-res \
     framework-sysconfig.xml \
@@ -308,7 +309,7 @@
 # Packages included only for eng or userdebug builds, previously debug tagged
 PRODUCT_PACKAGES_DEBUG := \
     adb_keys \
-    apex_debug_key \
+    apex.test.key \
     iotop \
     logpersist.start \
     perfprofd \
diff --git a/target/product/mainline_system.mk b/target/product/mainline_system.mk
index ed6dcc9..b0edb56 100644
--- a/target/product/mainline_system.mk
+++ b/target/product/mainline_system.mk
@@ -67,6 +67,8 @@
 # Enable dynamic partition size
 PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
 
+PRODUCT_LOCALES := en_US af_ZA am_ET ar_EG as_IN az_AZ be_BY bg_BG bn_BD bs_BA ca_ES cs_CZ da_DK de_DE el_GR en_AU en_CA en_GB en_IN es_ES es_US et_EE eu_ES fa_IR fi_FI fr_CA fr_FR gl_ES gu_IN hi_IN hr_HR hu_HU hy_AM in_ID is_IS it_IT iw_IL ja_JP ka_GE kk_KZ km_KH ko_KR ky_KG lo_LA lt_LT lv_LV km_MH kn_IN mn_MN ml_IN mk_MK mr_IN ms_MY my_MM ne_NP nb_NO nl_NL or_IN pa_IN pl_PL pt_BR pt_PT ro_RO ru_RU si_LK sk_SK sl_SI sq_AL sr_Latn_RS sr_RS sv_SE sw_TZ ta_IN te_IN th_TH tl_PH tr_TR uk_UA ur_PK uz_UZ vi_VN zh_CN zh_HK zh_TW zu_ZA en_XA ar_XB
+
 PRODUCT_NAME := mainline_system
 PRODUCT_BRAND := generic
 
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index 73ebd75..4142ea9 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -12,19 +12,6 @@
 include $(BUILD_PREBUILT)
 
 #######################################
-# apex_debug_key for eng/userdebug
-ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
-  include $(CLEAR_VARS)
-
-  LOCAL_MODULE := apex_debug_key
-  LOCAL_SRC_FILES := $(LOCAL_MODULE)
-  LOCAL_MODULE_CLASS := ETC
-  LOCAL_MODULE_PATH := $(TARGET_OUT)/etc/security/apex
-
-  include $(BUILD_PREBUILT)
-endif
-
-#######################################
 # adb key, if configured via PRODUCT_ADB_KEYS
 ifdef PRODUCT_ADB_KEYS
   ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
diff --git a/target/product/security/apex_debug_key b/target/product/security/apex_debug_key
deleted file mode 100644
index 28bc8f7..0000000
--- a/target/product/security/apex_debug_key
+++ /dev/null
Binary files differ
diff --git a/target/product/security/apex_debug_key.pem b/target/product/security/apex_debug_key.pem
deleted file mode 100644
index bd56778..0000000
--- a/target/product/security/apex_debug_key.pem
+++ /dev/null
@@ -1,51 +0,0 @@
------BEGIN RSA PRIVATE KEY-----
-MIIJKgIBAAKCAgEAt4iSfTF+e2khGQf0bUzTMwWFsgaiQbwQB3cvyBlE9XekFXUt
-GdOEhC2J0p+930UoF6gjjRRrgGF+8K5iV1m3oEbB3qGz6UUOurvVkt4tq96e/Q5a
-ogCOZEuWHjZfs2tQUVNJJtptIp9+0cM768vdf+qnK2JNFIhBqSY0FhjVljKevMcM
-w2tWFRZnKPQ3JoRnWqi5CIauQtBcWRFKIApyf41uHGMjpQRd8aTGeLXBRTi/yD73
-HltuKwSF2SXpj1F+9j4stqskQvipjQnid/Wb+nN3CNgyrGuRrtGvz71WWYcK3DLM
-jvGLOl06QrN6a7ZfLUN4qQjJ6Is5SLTSw/sfFE7Fpcbg6/Geh+jSvChuo6EUtzoX
-Qu42HsVXhrJLQ9/AVTWNmGc9IDr4PMtDiQc4FN8MOpUtR6V/zwrZFoeR3PHl9Z7v
-uTxLIcQLIott0mAjPhbNgbFBs5HP1Z8TfFcyZWpShlx+aM1V2mzYQ7sgsWjFKMSQ
-wIUk/YZ9QK/H5WKjC5M0yxueCU0ocvWFaAZ4RyS/r/SUyQpvyNXNwUsdp1a8sNxp
-LP9U7FG64C+T791yoQJ0sKVbts5SEu/Tojw6miYbH6Fspdo2xxfCbrv6SAbkjlct
-afOnEepgTlHet0G+y0N7OZRJ9WRGyLJNgGjmmDy9XSYGAykwwe4Fv348D0cCAwEA
-AQKCAgBuFra/78NNpXbb++CK+20oCqTyb3Y+dd8rizuXDElH8Fb1JA9EkZLIckRc
-mcMbvPDal9mTU29UV6b8Ga4VdVRnCGpb76TqRKkcK3Vlnm3IzUWSx1xoFmtTD9/h
-CX6IMdPApHOZoaWbAg7hJfm4a9XWV9ukc1eG/GBeZPMTWhwr9vsugztNsQG2rnR8
-pVi7eupAADrVOWwn2bG7H1rWM04Q4rXswy7rWd48BzmhyGxA6FRpehNjGzbPCOx8
-n3gkpp7Ad/T8MVYT8fJKDmbQy/ue1EnPfVeQAwok0dRiiNDV7OH/yVzYVVzNSoSa
-4+uH1qHqlbE3u3TZT0GyMfzG38f4scsbvG/AhH1fuPsy4QcWyLlMV6KUnk3KPc3Q
-yOeRR82qndQMTYQ5/PFiilk7cNbTU0OBjuNpu/t1LIE2J2gGZ5Jw+g2NGtM/xsgC
-jOahpRYvZB8fZ/bSjirwwmSSU+v0ZoPDHtt75R/QxqwPG2jai8kaGr7GEXWJfrfv
-CktMnb6LoCyNiiiZSMUgdDHOQEkVNmt9fxiVaxsaIL4BygropwlD4WbuyRMevfYz
-EffvvmaqC24zJi8WzDszCNLgP/piNhXDyxZX+KaQXj0Do/tzWBBkO0OO6mVGOkX2
-6dadXfhOIggWO8K2lKCUKwWMO9LaKwSwZ4gzcc1a+U9rpE8kUQKCAQEA8lBGLzOL
-Ht8+d13SY+NdPbL6qGvoqsKd5BfIhaNbH04Cp2zQs2TWySxmV47df03pGUpQOCKn
-tFRxoczUrf1gfFDCCC95+A/crls8QJHG+MScTBH5U8Q0s9ReUo/0xaa55u77x5uS
-0fAtdnOdqP8/pf1fSXUJvyLW85LWdkge1c7jk7I5MnWVO2Ak9/GkuRgITSSgVdBa
-kr8nU1BCzDY0gOTWo5J1+NqqVH2eYfEI621iD4SAE3n2JrCC4K/Nt2enEJwup2TR
-ym15g9nClicUQP5Y67eDfqTZu1d0I0Ezl1tL8UPxcLI+ucN4V6KL8RvqTVMnGX/R
-s1FwkPVMQ6dKaQKCAQEAweZeggcSFukr+tTbnzDAHxg4YqiR+30wo7i8NadGu6W/
-EiAdcCdmZYMI9KKc+B/N3cuFqBnaSd7VM7XvINdwZRanRj56Ya8LvQMi0S9YPiRn
-T4TXC3EeewN5+SSO0Dkw83tW1PLqgSINy5ijBs5lGoIYMCC+GSA2DuRBiPpcfhqJ
-kmC9uFQvrsge8CC8Sb1wHCr0Wz34qhPoTff6ZV8wm11Jkb5+tT7PMS5Ft0sEBsxV
-R1JFtLNs0k/YpMb4/OrZFZZSIFCTUVPvHQ1/5BwumVnolBC4LORCaSk1xUOydU9h
-bZd4qzIpFteGLGGRT6nEWC1YejLAvcFHVJiKs1F2LwKCAQEAzgnwA8bCLvgIt5rx
-gLod2I7NkFRhPIHLm92VRf0HSHEe1Jo0Q7Yk5F56j00NjmgDItwLpg/hpfZ/wOLY
-nTFrz4kj0636+jESprcxXn4WQAV+GTjXVqDpZ1fW9EEwEriYLoNbV/kzOIwPPD9G
-+iJATrZJRb7dEMdhGy/qaB0fCxKmdDoBZKSSxjAUfzfbpv+GX4IbS5ykx07+81q1
-0crtjgQHdoLdCUN1ve4qtIEt4nHaBfPWq7jy0ycXwlH6jE74wajsCq4xrPy1bKXH
-TcHg+PrNRXF/wDoQYboVKL0ST0r0IixxqjAGIhLRy0KN1/CypBlmj8od12oSW1AZ
-DxW6sQKCAQEAtIMW8M5MVO/2dam8XFMySMBvncl5PjuqEIFnFjwIaaFAZEtpnIPR
-nCeFKtpIb+aL7TQP1hNbWPIOYfm6CUUH6dRRHeAEZvRjZS+KNlxxNkkFtM3itVA2
-JCd0YjFakxbrL4FfsRgEoPtnBGexPiDflvIOOqAA2btXGD3/lNofSXbDJHbTqMsX
-KQw9YSfYon2t5UtH+bmTyiKGXi/B+KXJxpnuZ7SEmY9DrHF7jcxUj0+jBKbfJf70
-DEcxVRW3rx2jw6kSA+t/enM9ZDqxGVfzOeit0UpPa9uEyAoJeQAxH20rMq+VMyub
-fRxgWOjsMtHFbKGqgPjG3uEU2vi4B4CLGQKCAQEA2Mr5f2AXPR8jca1+Id+CxZpU
-bgMML7gW31L4lGX9Teo9z+zSdN7sIwqe42Zla1N9wda8p5ribnJxwRdxcPL8bid5
-LLlls4xXD/jQCQCFL90X59Tm6VD6tm1VyCjL44nRwAqP4vJObSB5rTqJYtkfVmnp
-KERF5P0i5yv4Oox0ZOsThou9jtyl1dS50Td0Urhp4LhPdmpDPUq25K1sDDfnGFm6
-IcMPkVznRPUoKQCG9DSQcQqttkSV9Po+qfLa3aHtdndfe88Gd9uom8bsAMTZAfSZ
-D4YhqBHSLWrxvtQ8GxkaPITJv7hocwssdFRUj5/UJKJBgUXPBXEXh+fxlDaGQQ==
------END RSA PRIVATE KEY-----
diff --git a/target/product/vndk/Android.mk b/target/product/vndk/Android.mk
index 5d009f9..7953db0 100644
--- a/target/product/vndk/Android.mk
+++ b/target/product/vndk/Android.mk
@@ -42,6 +42,10 @@
 ifeq ($(TARGET_IS_64_BIT)|$(TARGET_2ND_ARCH),true|)
 # TODO(b/110429754) remove this condition when we support 64-bit-only device
 check-vndk-list: ;
+else ifeq ($(TARGET_BUILD_PDK),true)
+# b/118634643: don't check the VNDK lib list when building PDK. Some libs (libandroid_net.so
+# and some RenderScript-related ones) can't be built in PDK due to missing frameworks/base.
+check-vndk-list: ;
 else
 check-vndk-list: $(check-vndk-list-timestamp)
 endif
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 5a54462..c2e6f85 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -59,6 +59,5 @@
 if [ -n "$BUILD_THUMBPRINT" ] ; then
   echo "ro.build.thumbprint=$BUILD_THUMBPRINT"
 fi
-echo "ro.build.characteristics=$TARGET_AAPT_CHARACTERISTICS"
 
 echo "# end build properties"
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index dc882f2..6d22694 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -248,20 +248,9 @@
   if fs_type.startswith("squash"):
     fs_spans_partition = False
 
-  is_verity_partition = "verity_block_device" in prop_dict
-  verity_supported = prop_dict.get("verity") == "true"
-  verity_fec_supported = prop_dict.get("verity_fec") == "true"
-
-  avb_footer_type = None
-  if prop_dict.get("avb_hash_enable") == "true":
-    avb_footer_type = "hash"
-  elif prop_dict.get("avb_hashtree_enable") == "true":
-    avb_footer_type = "hashtree"
-
-  if avb_footer_type:
-    avbtool = prop_dict.get("avb_avbtool")
-    avb_signing_args = prop_dict.get(
-        "avb_add_" + avb_footer_type + "_footer_args")
+  # Get a builder for creating an image that's to be verified by Verified Boot,
+  # or None if not applicable.
+  verity_image_builder = verity_utils.CreateVerityImageBuilder(prop_dict)
 
   if (prop_dict.get("use_dynamic_partition_size") == "true" and
       "partition_size" not in prop_dict):
@@ -273,13 +262,8 @@
     size += int(prop_dict.get("partition_reserved_size", BYTES_IN_MB * 16))
     # Round this up to a multiple of 4K so that avbtool works
     size = common.RoundUpTo4K(size)
-    # Adjust partition_size to add more space for AVB footer, to prevent
-    # it from consuming partition_reserved_size.
-    if avb_footer_type:
-      size = verity_utils.AVBCalcMinPartitionSize(
-          size,
-          lambda x: verity_utils.AVBCalcMaxImageSize(
-              avbtool, avb_footer_type, x, avb_signing_args))
+    if verity_image_builder:
+      size = verity_image_builder.CalculateDynamicPartitionSize(size)
     prop_dict["partition_size"] = str(size)
     if fs_type.startswith("ext"):
       if "extfs_inode_count" not in prop_dict:
@@ -297,7 +281,7 @@
         logger.info(
             "Not worth reducing image %d <= %d.", free_size, reserved_size)
       else:
-        size -= free_size + (free_size // 59)
+        size -= free_size + (free_size // 60)
         size += reserved_size
         if block_size <= 4096:
           size = common.RoundUpTo4K(size)
@@ -316,19 +300,8 @@
   prop_dict["image_size"] = prop_dict["partition_size"]
 
   # Adjust the image size to make room for the hashes if this is to be verified.
-  if verity_supported and is_verity_partition:
-    partition_size = int(prop_dict.get("partition_size"))
-    image_size, verity_size = verity_utils.AdjustPartitionSizeForVerity(
-        partition_size, verity_fec_supported)
-    prop_dict["image_size"] = str(image_size)
-    prop_dict["verity_size"] = str(verity_size)
-
-  # Adjust the image size for AVB hash footer or AVB hashtree footer.
-  if avb_footer_type:
-    partition_size = prop_dict["partition_size"]
-    # avb_add_hash_footer_args or avb_add_hashtree_footer_args.
-    max_image_size = verity_utils.AVBCalcMaxImageSize(
-        avbtool, avb_footer_type, partition_size, avb_signing_args)
+  if verity_image_builder:
+    max_image_size = verity_image_builder.CalculateMaxImageSize()
     prop_dict["image_size"] = str(max_image_size)
 
   if fs_type.startswith("ext"):
@@ -441,44 +414,20 @@
   if "partition_headroom" in prop_dict and fs_type.startswith("ext4"):
     CheckHeadroom(mkfs_output, prop_dict)
 
-  if not fs_spans_partition:
-    mount_point = prop_dict.get("mount_point")
-    image_size = int(prop_dict["image_size"])
-    sparse_image_size = verity_utils.GetSimgSize(out_file)
-    if sparse_image_size > image_size:
-      raise BuildImageError(
-          "Error: {} image size of {} is larger than partition size of "
-          "{}".format(mount_point, sparse_image_size, image_size))
-    if verity_supported and is_verity_partition:
-      verity_utils.ZeroPadSimg(out_file, image_size - sparse_image_size)
+  if not fs_spans_partition and verity_image_builder:
+    verity_image_builder.PadSparseImage(out_file)
 
   # Create the verified image if this is to be verified.
-  if verity_supported and is_verity_partition:
-    verity_utils.MakeVerityEnabledImage(
-        out_file, verity_fec_supported, prop_dict)
-
-  # Add AVB HASH or HASHTREE footer (metadata).
-  if avb_footer_type:
-    partition_size = prop_dict["partition_size"]
-    partition_name = prop_dict["partition_name"]
-    # key_path and algorithm are only available when chain partition is used.
-    key_path = prop_dict.get("avb_key_path")
-    algorithm = prop_dict.get("avb_algorithm")
-    salt = prop_dict.get("avb_salt")
-    verity_utils.AVBAddFooter(
-        out_file, avbtool, avb_footer_type, partition_size, partition_name,
-        key_path, algorithm, salt, avb_signing_args)
+  if verity_image_builder:
+    verity_image_builder.Build(out_file)
 
   if run_e2fsck and prop_dict.get("skip_fsck") != "true":
     unsparse_image = UnsparseImage(out_file, replace=False)
 
     # Run e2fsck on the inflated image file
     e2fsck_command = ["e2fsck", "-f", "-n", unsparse_image]
-    # TODO(b/112062612): work around e2fsck failure with SANITIZE_HOST=address
-    env4e2fsck = os.environ.copy()
-    env4e2fsck["ASAN_OPTIONS"] = "detect_odr_violation=0"
     try:
-      common.RunAndCheckOutput(e2fsck_command, env=env4e2fsck)
+      common.RunAndCheckOutput(e2fsck_command)
     finally:
       os.remove(unsparse_image)
 
diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py
index d778d11..a6a8876 100644
--- a/tools/releasetools/test_validate_target_files.py
+++ b/tools/releasetools/test_validate_target_files.py
@@ -24,6 +24,7 @@
 import test_utils
 import verity_utils
 from validate_target_files import ValidateVerifiedBootImages
+from verity_utils import CreateVerityImageBuilder
 
 
 class ValidateTargetFilesTest(test_utils.ReleaseToolsTestCase):
@@ -107,10 +108,16 @@
         options)
 
   def _generate_system_image(self, output_file):
-    verity_fec = True
-    partition_size = 1024 * 1024
-    image_size, verity_size = verity_utils.AdjustPartitionSizeForVerity(
-        partition_size, verity_fec)
+    prop_dict = {
+        'partition_size': str(1024 * 1024),
+        'verity': 'true',
+        'verity_block_device': '/dev/block/system',
+        'verity_key' : os.path.join(self.testdata_dir, 'testkey'),
+        'verity_fec': "true",
+        'verity_signer_cmd': 'verity_signer',
+    }
+    verity_image_builder = CreateVerityImageBuilder(prop_dict)
+    image_size = verity_image_builder.CalculateMaxImageSize()
 
     # Use an empty root directory.
     system_root = common.MakeTempDir()
@@ -124,15 +131,7 @@
             stdoutdata))
 
     # Append the verity metadata.
-    prop_dict = {
-        'partition_size' : str(partition_size),
-        'image_size' : str(image_size),
-        'verity_block_device' : '/dev/block/system',
-        'verity_key' : os.path.join(self.testdata_dir, 'testkey'),
-        'verity_signer_cmd' : 'verity_signer',
-        'verity_size' : str(verity_size),
-    }
-    verity_utils.MakeVerityEnabledImage(output_file, verity_fec, prop_dict)
+    verity_image_builder.Build(output_file)
 
   def test_ValidateVerifiedBootImages_systemImage(self):
     input_tmp = common.MakeTempDir()
diff --git a/tools/releasetools/test_verity_utils.py b/tools/releasetools/test_verity_utils.py
index 0988d8e..41f234b 100644
--- a/tools/releasetools/test_verity_utils.py
+++ b/tools/releasetools/test_verity_utils.py
@@ -25,10 +25,11 @@
 from rangelib import RangeSet
 from test_utils import get_testdata_dir, ReleaseToolsTestCase
 from verity_utils import (
-    AdjustPartitionSizeForVerity, AVBCalcMinPartitionSize, BLOCK_SIZE,
-    CreateHashtreeInfoGenerator, HashtreeInfo, MakeVerityEnabledImage,
+    CreateHashtreeInfoGenerator, CreateVerityImageBuilder, HashtreeInfo,
     VerifiedBootVersion1HashtreeInfoGenerator)
 
+BLOCK_SIZE = common.BLOCK_SIZE
+
 
 class VerifiedBootVersion1HashtreeInfoGeneratorTest(ReleaseToolsTestCase):
 
@@ -64,8 +65,17 @@
 
   def _generate_image(self):
     partition_size = 1024 * 1024
-    adjusted_size, verity_size = AdjustPartitionSizeForVerity(
-        partition_size, True)
+    prop_dict = {
+        'partition_size': str(partition_size),
+        'verity': 'true',
+        'verity_block_device': '/dev/block/system',
+        'verity_key': os.path.join(self.testdata_dir, 'testkey'),
+        'verity_fec': 'true',
+        'verity_signer_cmd': 'verity_signer',
+    }
+    verity_image_builder = CreateVerityImageBuilder(prop_dict)
+    self.assertIsNotNone(verity_image_builder)
+    adjusted_size = verity_image_builder.CalculateMaxImageSize()
 
     raw_image = ""
     for i in range(adjusted_size):
@@ -74,15 +84,7 @@
     output_file = self._create_simg(raw_image)
 
     # Append the verity metadata.
-    prop_dict = {
-        'partition_size': str(partition_size),
-        'image_size': str(adjusted_size),
-        'verity_block_device': '/dev/block/system',
-        'verity_key': os.path.join(self.testdata_dir, 'testkey'),
-        'verity_signer_cmd': 'verity_signer',
-        'verity_size': str(verity_size),
-    }
-    MakeVerityEnabledImage(output_file, True, prop_dict)
+    verity_image_builder.Build(output_file)
 
     return output_file
 
@@ -163,23 +165,33 @@
     self.assertEqual(self.expected_root_hash, info.root_hash)
 
 
-class VerityUtilsTest(ReleaseToolsTestCase):
+class VerifiedBootVersion2VerityImageBuilderTest(ReleaseToolsTestCase):
 
   def setUp(self):
-    # To test AVBCalcMinPartitionSize(), by using 200MB to 2GB image size.
+    # To test CalculateMinPartitionSize(), by using 200MB to 2GB image size.
     #   -  51200 = 200MB * 1024 * 1024 / 4096
     #   - 524288 = 2GB * 1024 * 1024 * 1024 / 4096
     self._image_sizes = [BLOCK_SIZE * random.randint(51200, 524288) + offset
                          for offset in range(BLOCK_SIZE)]
 
-  def test_AVBCalcMinPartitionSize_LinearFooterSize(self):
+    prop_dict = {
+        'partition_size': None,
+        'partition_name': 'system',
+        'avb_avbtool': 'avbtool',
+        'avb_hashtree_enable': 'true',
+        'avb_add_hashtree_footer_args': None,
+    }
+    self.builder = CreateVerityImageBuilder(prop_dict)
+    self.assertEqual(2, self.builder.version)
+
+  def test_CalculateMinPartitionSize_LinearFooterSize(self):
     """Tests with footer size which is linear to partition size."""
     for image_size in self._image_sizes:
       for ratio in 0.95, 0.56, 0.22:
         expected_size = common.RoundUpTo4K(int(math.ceil(image_size / ratio)))
         self.assertEqual(
             expected_size,
-            AVBCalcMinPartitionSize(
+            self.builder.CalculateMinPartitionSize(
                 image_size, lambda x, ratio=ratio: int(x * ratio)))
 
   def test_AVBCalcMinPartitionSize_SlowerGrowthFooterSize(self):
@@ -191,7 +203,8 @@
       return partition_size - int(math.pow(partition_size, 0.95))
 
     for image_size in self._image_sizes:
-      min_partition_size = AVBCalcMinPartitionSize(image_size, _SizeCalculator)
+      min_partition_size = self.builder.CalculateMinPartitionSize(
+          image_size, _SizeCalculator)
       # Checks min_partition_size can accommodate image_size.
       self.assertGreaterEqual(
           _SizeCalculator(min_partition_size),
@@ -201,7 +214,7 @@
           _SizeCalculator(min_partition_size - BLOCK_SIZE),
           image_size)
 
-  def test_AVBCalcMinPartitionSize_FasterGrowthFooterSize(self):
+  def test_CalculateMinPartitionSize_FasterGrowthFooterSize(self):
     """Tests with footer size which grows faster than partition size."""
 
     def _SizeCalculator(partition_size):
@@ -211,7 +224,8 @@
       return int(math.pow(partition_size, 0.95))
 
     for image_size in self._image_sizes:
-      min_partition_size = AVBCalcMinPartitionSize(image_size, _SizeCalculator)
+      min_partition_size = self.builder.CalculateMinPartitionSize(
+          image_size, _SizeCalculator)
       # Checks min_partition_size can accommodate image_size.
       self.assertGreaterEqual(
           _SizeCalculator(min_partition_size),
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index 00af296..0a3dcec 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -39,30 +39,30 @@
     Exception.__init__(self, message)
 
 
-def GetVerityFECSize(partition_size):
-  cmd = ["fec", "-s", str(partition_size)]
+def GetVerityFECSize(image_size):
+  cmd = ["fec", "-s", str(image_size)]
   output = common.RunAndCheckOutput(cmd, verbose=False)
   return int(output)
 
 
-def GetVerityTreeSize(partition_size):
-  cmd = ["build_verity_tree", "-s", str(partition_size)]
+def GetVerityTreeSize(image_size):
+  cmd = ["build_verity_tree", "-s", str(image_size)]
   output = common.RunAndCheckOutput(cmd, verbose=False)
   return int(output)
 
 
-def GetVerityMetadataSize(partition_size):
-  cmd = ["build_verity_metadata.py", "size", str(partition_size)]
+def GetVerityMetadataSize(image_size):
+  cmd = ["build_verity_metadata.py", "size", str(image_size)]
   output = common.RunAndCheckOutput(cmd, verbose=False)
   return int(output)
 
 
-def GetVeritySize(partition_size, fec_supported):
-  verity_tree_size = GetVerityTreeSize(partition_size)
-  verity_metadata_size = GetVerityMetadataSize(partition_size)
+def GetVeritySize(image_size, fec_supported):
+  verity_tree_size = GetVerityTreeSize(image_size)
+  verity_metadata_size = GetVerityMetadataSize(image_size)
   verity_size = verity_tree_size + verity_metadata_size
   if fec_supported:
-    fec_size = GetVerityFECSize(partition_size + verity_size)
+    fec_size = GetVerityFECSize(image_size + verity_size)
     return verity_size + fec_size
   return verity_size
 
@@ -79,54 +79,6 @@
   simg.AppendFillChunk(0, blocks)
 
 
-def AdjustPartitionSizeForVerity(partition_size, fec_supported):
-  """Modifies the provided partition size to account for the verity metadata.
-
-  This information is used to size the created image appropriately.
-
-  Args:
-    partition_size: the size of the partition to be verified.
-
-  Returns:
-    A tuple of the size of the partition adjusted for verity metadata, and
-    the size of verity metadata.
-  """
-  key = "%d %d" % (partition_size, fec_supported)
-  if key in AdjustPartitionSizeForVerity.results:
-    return AdjustPartitionSizeForVerity.results[key]
-
-  hi = partition_size
-  if hi % BLOCK_SIZE != 0:
-    hi = (hi // BLOCK_SIZE) * BLOCK_SIZE
-
-  # verity tree and fec sizes depend on the partition size, which
-  # means this estimate is always going to be unnecessarily small
-  verity_size = GetVeritySize(hi, fec_supported)
-  lo = partition_size - verity_size
-  result = lo
-
-  # do a binary search for the optimal size
-  while lo < hi:
-    i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
-    v = GetVeritySize(i, fec_supported)
-    if i + v <= partition_size:
-      if result < i:
-        result = i
-        verity_size = v
-      lo = i + BLOCK_SIZE
-    else:
-      hi = i
-
-  logger.info(
-      "Adjusted partition size for verity, partition_size: %s, verity_size: %s",
-      result, verity_size)
-  AdjustPartitionSizeForVerity.results[key] = (result, verity_size)
-  return (result, verity_size)
-
-
-AdjustPartitionSizeForVerity.results = {}
-
-
 def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
                    padding_size):
   cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
@@ -168,6 +120,7 @@
   try:
     common.RunAndCheckOutput(cmd)
   except:
+    logger.exception(error_message)
     raise BuildVerityImageError(error_message)
 
 
@@ -182,190 +135,369 @@
       for line in input_file:
         out_file.write(line)
   except IOError:
+    logger.exception(error_message)
     raise BuildVerityImageError(error_message)
 
 
-def BuildVerifiedImage(data_image_path, verity_image_path,
-                       verity_metadata_path, verity_fec_path,
-                       padding_size, fec_supported):
-  Append(
-      verity_image_path, verity_metadata_path,
-      "Could not append verity metadata!")
-
-  if fec_supported:
-    # Build FEC for the entire partition, including metadata.
-    BuildVerityFEC(
-        data_image_path, verity_image_path, verity_fec_path, padding_size)
-    Append(verity_image_path, verity_fec_path, "Could not append FEC!")
-
-  Append2Simg(
-      data_image_path, verity_image_path, "Could not append verity data!")
-
-
-def MakeVerityEnabledImage(out_file, fec_supported, prop_dict):
-  """Creates an image that is verifiable using dm-verity.
+def CreateVerityImageBuilder(prop_dict):
+  """Returns a verity image builder based on the given build properties.
 
   Args:
-    out_file: the location to write the verifiable image at
-    prop_dict: a dictionary of properties required for image creation and
-               verification
-
-  Raises:
-    AssertionError: On invalid partition sizes.
-  """
-  # get properties
-  image_size = int(prop_dict["image_size"])
-  block_dev = prop_dict["verity_block_device"]
-  signer_key = prop_dict["verity_key"] + ".pk8"
-  if OPTIONS.verity_signer_path is not None:
-    signer_path = OPTIONS.verity_signer_path
-  else:
-    signer_path = prop_dict["verity_signer_cmd"]
-  signer_args = OPTIONS.verity_signer_args
-
-  tempdir_name = common.MakeTempDir(suffix="_verity_images")
-
-  # Get partial image paths.
-  verity_image_path = os.path.join(tempdir_name, "verity.img")
-  verity_metadata_path = os.path.join(tempdir_name, "verity_metadata.img")
-  verity_fec_path = os.path.join(tempdir_name, "verity_fec.img")
-
-  # Build the verity tree and get the root hash and salt.
-  root_hash, salt = BuildVerityTree(out_file, verity_image_path)
-
-  # Build the metadata blocks.
-  verity_disable = "verity_disable" in prop_dict
-  BuildVerityMetadata(
-      image_size, verity_metadata_path, root_hash, salt, block_dev, signer_path,
-      signer_key, signer_args, verity_disable)
-
-  # Build the full verified image.
-  partition_size = int(prop_dict["partition_size"])
-  verity_size = int(prop_dict["verity_size"])
-
-  padding_size = partition_size - image_size - verity_size
-  assert padding_size >= 0
-
-  BuildVerifiedImage(
-      out_file, verity_image_path, verity_metadata_path, verity_fec_path,
-      padding_size, fec_supported)
-
-
-def AVBCalcMaxImageSize(avbtool, footer_type, partition_size, additional_args):
-  """Calculates max image size for a given partition size.
-
-  Args:
-    avbtool: String with path to avbtool.
-    footer_type: 'hash' or 'hashtree' for generating footer.
-    partition_size: The size of the partition in question.
-    additional_args: Additional arguments to pass to "avbtool add_hash_footer"
-        or "avbtool add_hashtree_footer".
+    prop_dict: A dict that contains the build properties. In particular, it will
+        look for verity-related property values.
 
   Returns:
-    The maximum image size.
-
-  Raises:
-    BuildVerityImageError: On invalid image size.
+    A VerityImageBuilder instance for Verified Boot 1.0 or Verified Boot 2.0; or
+        None if the given build doesn't support Verified Boot.
   """
-  cmd = [avbtool, "add_%s_footer" % footer_type,
-         "--partition_size", str(partition_size), "--calc_max_image_size"]
-  cmd.extend(shlex.split(additional_args))
+  partition_size = prop_dict.get("partition_size")
+  # partition_size could be None at this point, if using dynamic partitions.
+  if partition_size:
+    partition_size = int(partition_size)
 
-  output = common.RunAndCheckOutput(cmd)
-  image_size = int(output)
-  if image_size <= 0:
-    raise BuildVerityImageError(
-        "Invalid max image size: {}".format(output))
-  return image_size
-
-
-def AVBCalcMinPartitionSize(image_size, size_calculator):
-  """Calculates min partition size for a given image size.
-
-  Args:
-    image_size: The size of the image in question.
-    size_calculator: The function to calculate max image size
-        for a given partition size.
-
-  Returns:
-    The minimum partition size required to accommodate the image size.
-  """
-  # Use image size as partition size to approximate final partition size.
-  image_ratio = size_calculator(image_size) / float(image_size)
-
-  # Prepare a binary search for the optimal partition size.
-  lo = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE - BLOCK_SIZE
-
-  # Ensure lo is small enough: max_image_size should <= image_size.
-  delta = BLOCK_SIZE
-  max_image_size = size_calculator(lo)
-  while max_image_size > image_size:
-    image_ratio = max_image_size / float(lo)
-    lo = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE - delta
-    delta *= 2
-    max_image_size = size_calculator(lo)
-
-  hi = lo + BLOCK_SIZE
-
-  # Ensure hi is large enough: max_image_size should >= image_size.
-  delta = BLOCK_SIZE
-  max_image_size = size_calculator(hi)
-  while max_image_size < image_size:
-    image_ratio = max_image_size / float(hi)
-    hi = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE + delta
-    delta *= 2
-    max_image_size = size_calculator(hi)
-
-  partition_size = hi
-
-  # Start to binary search.
-  while lo < hi:
-    mid = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
-    max_image_size = size_calculator(mid)
-    if max_image_size >= image_size:  # if mid can accommodate image_size
-      if mid < partition_size:  # if a smaller partition size is found
-        partition_size = mid
-      hi = mid
+  # Verified Boot 1.0
+  verity_supported = prop_dict.get("verity") == "true"
+  is_verity_partition = "verity_block_device" in prop_dict
+  if verity_supported and is_verity_partition:
+    if OPTIONS.verity_signer_path is not None:
+      signer_path = OPTIONS.verity_signer_path
     else:
-      lo = mid + BLOCK_SIZE
+      signer_path = prop_dict["verity_signer_cmd"]
+    return Version1VerityImageBuilder(
+        partition_size,
+        prop_dict["verity_block_device"],
+        prop_dict.get("verity_fec") == "true",
+        signer_path,
+        prop_dict["verity_key"] + ".pk8",
+        OPTIONS.verity_signer_args,
+        "verity_disable" in prop_dict)
 
-  logger.info(
-      "AVBCalcMinPartitionSize(%d): partition_size: %d.",
-      image_size, partition_size)
+  # Verified Boot 2.0
+  if (prop_dict.get("avb_hash_enable") == "true" or
+      prop_dict.get("avb_hashtree_enable") == "true"):
+    # key_path and algorithm are only available when chain partition is used.
+    key_path = prop_dict.get("avb_key_path")
+    algorithm = prop_dict.get("avb_algorithm")
+    if prop_dict.get("avb_hash_enable") == "true":
+      return VerifiedBootVersion2VerityImageBuilder(
+          prop_dict["partition_name"],
+          partition_size,
+          VerifiedBootVersion2VerityImageBuilder.AVB_HASH_FOOTER,
+          prop_dict["avb_avbtool"],
+          key_path,
+          algorithm,
+          prop_dict.get("avb_salt"),
+          prop_dict["avb_add_hash_footer_args"])
+    else:
+      return VerifiedBootVersion2VerityImageBuilder(
+          prop_dict["partition_name"],
+          partition_size,
+          VerifiedBootVersion2VerityImageBuilder.AVB_HASHTREE_FOOTER,
+          prop_dict["avb_avbtool"],
+          key_path,
+          algorithm,
+          prop_dict.get("avb_salt"),
+          prop_dict["avb_add_hashtree_footer_args"])
 
-  return partition_size
+  return None
 
 
-def AVBAddFooter(image_path, avbtool, footer_type, partition_size,
-                 partition_name, key_path, algorithm, salt,
-                 additional_args):
-  """Adds dm-verity hashtree and AVB metadata to an image.
+class VerityImageBuilder(object):
+  """A builder that generates an image with verity metadata for Verified Boot.
 
-  Args:
-    image_path: Path to image to modify.
-    avbtool: String with path to avbtool.
-    footer_type: 'hash' or 'hashtree' for generating footer.
-    partition_size: The size of the partition in question.
-    partition_name: The name of the partition - will be embedded in metadata.
-    key_path: Path to key to use or None.
-    algorithm: Name of algorithm to use or None.
-    salt: The salt to use (a hexadecimal string) or None.
-    additional_args: Additional arguments to pass to "avbtool add_hash_footer"
-        or "avbtool add_hashtree_footer".
+  A VerityImageBuilder instance handles the work of building an image with
+  verity metadata for Android Verified Boot. This class defines the common
+  interface between Verified Boot 1.0 and Verified Boot 2.0. A matching
+  builder will be returned based on the given build properties.
+
+  More info on the verity image generation can be found at the following link.
+  https://source.android.com/security/verifiedboot/dm-verity#implementation
   """
-  cmd = [avbtool, "add_%s_footer" % footer_type,
-         "--partition_size", partition_size,
-         "--partition_name", partition_name,
-         "--image", image_path]
 
-  if key_path and algorithm:
-    cmd.extend(["--key", key_path, "--algorithm", algorithm])
-  if salt:
-    cmd.extend(["--salt", salt])
+  def CalculateMaxImageSize(self, partition_size):
+    """Calculates the filesystem image size for the given partition size."""
+    raise NotImplementedError
 
-  cmd.extend(shlex.split(additional_args))
+  def CalculateDynamicPartitionSize(self, image_size):
+    """Calculates and sets the partition size for a dynamic partition."""
+    raise NotImplementedError
 
-  common.RunAndCheckOutput(cmd)
+  def PadSparseImage(self, out_file):
+    """Adds padding to the generated sparse image."""
+    raise NotImplementedError
+
+  def Build(self, out_file):
+    """Builds the verity image and writes it to the given file."""
+    raise NotImplementedError
+
+
+class Version1VerityImageBuilder(VerityImageBuilder):
+  """A VerityImageBuilder for Verified Boot 1.0."""
+
+  def __init__(self, partition_size, block_dev, fec_supported, signer_path,
+               signer_key, signer_args, verity_disable):
+    self.version = 1
+    self.partition_size = partition_size
+    self.block_device = block_dev
+    self.fec_supported = fec_supported
+    self.signer_path = signer_path
+    self.signer_key = signer_key
+    self.signer_args = signer_args
+    self.verity_disable = verity_disable
+    self.image_size = None
+    self.verity_size = None
+
+  def CalculateDynamicPartitionSize(self, image_size):
+    # This needs to be implemented. Note that returning the given image size as
+    # the partition size doesn't make sense, as it will fail later.
+    raise NotImplementedError
+
+  def CalculateMaxImageSize(self, partition_size=None):
+    """Calculates the max image size by accounting for the verity metadata.
+
+    Args:
+      partition_size: The partition size, which defaults to self.partition_size
+          if unspecified.
+
+    Returns:
+      The size of the image adjusted for verity metadata.
+    """
+    if partition_size is None:
+      partition_size = self.partition_size
+    assert partition_size > 0, \
+        "Invalid partition size: {}".format(partition_size)
+
+    hi = partition_size
+    if hi % BLOCK_SIZE != 0:
+      hi = (hi // BLOCK_SIZE) * BLOCK_SIZE
+
+    # The verity tree and FEC sizes depend on the partition size, so this
+    # initial estimate (computed against the full partition size) is always
+    # going to be unnecessarily small.
+    verity_size = GetVeritySize(hi, self.fec_supported)
+    lo = partition_size - verity_size
+    result = lo
+
+    # do a binary search for the optimal size
+    while lo < hi:
+      i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
+      v = GetVeritySize(i, self.fec_supported)
+      if i + v <= partition_size:
+        if result < i:
+          result = i
+          verity_size = v
+        lo = i + BLOCK_SIZE
+      else:
+        hi = i
+
+    self.image_size = result
+    self.verity_size = verity_size
+
+    logger.info(
+        "Calculated image size for verity: partition_size %d, image_size %d, "
+        "verity_size %d", partition_size, result, verity_size)
+    return result
+
+  def Build(self, out_file):
+    """Creates an image that is verifiable using dm-verity.
+
+    Args:
+      out_file: The location to write the verifiable image to.
+
+    Raises:
+      AssertionError: On invalid partition sizes.
+      BuildVerityImageError: On other errors.
+    """
+    image_size = int(self.image_size)
+    tempdir_name = common.MakeTempDir(suffix="_verity_images")
+
+    # Get partial image paths.
+    verity_image_path = os.path.join(tempdir_name, "verity.img")
+    verity_metadata_path = os.path.join(tempdir_name, "verity_metadata.img")
+
+    # Build the verity tree and get the root hash and salt.
+    root_hash, salt = BuildVerityTree(out_file, verity_image_path)
+
+    # Build the metadata blocks.
+    BuildVerityMetadata(
+        image_size, verity_metadata_path, root_hash, salt, self.block_device,
+        self.signer_path, self.signer_key, self.signer_args,
+        self.verity_disable)
+
+    padding_size = self.partition_size - self.image_size - self.verity_size
+    assert padding_size >= 0
+
+    # Build the full verified image.
+    Append(
+        verity_image_path, verity_metadata_path,
+        "Failed to append verity metadata")
+
+    if self.fec_supported:
+      # Build FEC for the entire partition, including metadata.
+      verity_fec_path = os.path.join(tempdir_name, "verity_fec.img")
+      BuildVerityFEC(
+          out_file, verity_image_path, verity_fec_path, padding_size)
+      Append(verity_image_path, verity_fec_path, "Failed to append FEC")
+
+    Append2Simg(
+        out_file, verity_image_path, "Failed to append verity data")
+
+  def PadSparseImage(self, out_file):
+    sparse_image_size = GetSimgSize(out_file)
+    if sparse_image_size > self.image_size:
+      raise BuildVerityImageError(
+          "Error: image size of {} is larger than partition size of "
+          "{}".format(sparse_image_size, self.image_size))
+    ZeroPadSimg(out_file, self.image_size - sparse_image_size)
+
+
+class VerifiedBootVersion2VerityImageBuilder(VerityImageBuilder):
+  """A VerityImageBuilder for Verified Boot 2.0."""
+
+  AVB_HASH_FOOTER = 1
+  AVB_HASHTREE_FOOTER = 2
+
+  def __init__(self, partition_name, partition_size, footer_type, avbtool,
+               key_path, algorithm, salt, signing_args):
+    self.version = 2
+    self.partition_name = partition_name
+    self.partition_size = partition_size
+    self.footer_type = footer_type
+    self.avbtool = avbtool
+    self.algorithm = algorithm
+    self.key_path = key_path
+    self.salt = salt
+    self.signing_args = signing_args
+    self.image_size = None
+
+  def CalculateMinPartitionSize(self, image_size, size_calculator=None):
+    """Calculates min partition size for a given image size.
+
+    This is used when determining the partition size for a dynamic partition,
+    which should cover the given image size (for filesystem files) as well as
+    the verity metadata size.
+
+    Args:
+      image_size: The size of the image in question.
+      size_calculator: The function to calculate max image size
+          for a given partition size.
+
+    Returns:
+      The minimum partition size required to accommodate the image size.
+    """
+    if size_calculator is None:
+      size_calculator = self.CalculateMaxImageSize
+
+    # Use image size as partition size to approximate final partition size.
+    image_ratio = size_calculator(image_size) / float(image_size)
+
+    # Prepare a binary search for the optimal partition size.
+    lo = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE - BLOCK_SIZE
+
+    # Ensure lo is small enough: max_image_size should be <= image_size.
+    delta = BLOCK_SIZE
+    max_image_size = size_calculator(lo)
+    while max_image_size > image_size:
+      image_ratio = max_image_size / float(lo)
+      lo = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE - delta
+      delta *= 2
+      max_image_size = size_calculator(lo)
+
+    hi = lo + BLOCK_SIZE
+
+    # Ensure hi is large enough: max_image_size should be >= image_size.
+    delta = BLOCK_SIZE
+    max_image_size = size_calculator(hi)
+    while max_image_size < image_size:
+      image_ratio = max_image_size / float(hi)
+      hi = int(image_size / image_ratio) // BLOCK_SIZE * BLOCK_SIZE + delta
+      delta *= 2
+      max_image_size = size_calculator(hi)
+
+    partition_size = hi
+
+    # Start to binary search.
+    while lo < hi:
+      mid = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
+      max_image_size = size_calculator(mid)
+      if max_image_size >= image_size:  # if mid can accommodate image_size
+        if mid < partition_size:  # if a smaller partition size is found
+          partition_size = mid
+        hi = mid
+      else:
+        lo = mid + BLOCK_SIZE
+
+    logger.info(
+        "CalculateMinPartitionSize(%d): partition_size %d.", image_size,
+        partition_size)
+
+    return partition_size
+
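+  # Illustrative note (not part of the original change): for a dynamic
+  # partition, CalculateDynamicPartitionSize() below delegates to
+  # CalculateMinPartitionSize(); a caller would typically do, e.g.
+  #   partition_size = builder.CalculateDynamicPartitionSize(image_size)
+  #   max_image_size = builder.CalculateMaxImageSize()
+  # where max_image_size is expected to be at least image_size.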
+  def CalculateDynamicPartitionSize(self, image_size):
+    self.partition_size = self.CalculateMinPartitionSize(image_size)
+    return self.partition_size
+
+  def CalculateMaxImageSize(self, partition_size=None):
+    """Calculates max image size for a given partition size.
+
+    Args:
+      partition_size: The partition size, which defaults to self.partition_size
+          if unspecified.
+
+    Returns:
+      The maximum image size.
+
+    Raises:
+      BuildVerityImageError: On error, or if the calculated image size is
+          invalid.
+    """
+    if partition_size is None:
+      partition_size = self.partition_size
+    assert partition_size > 0, \
+        "Invalid partition size: {}".format(partition_size)
+
+    add_footer = ("add_hash_footer" if self.footer_type == self.AVB_HASH_FOOTER
+                  else "add_hashtree_footer")
+    cmd = [self.avbtool, add_footer, "--partition_size",
+           str(partition_size), "--calc_max_image_size"]
+    cmd.extend(shlex.split(self.signing_args))
+
+    proc = common.Run(cmd)
+    output, _ = proc.communicate()
+    if proc.returncode != 0:
+      raise BuildVerityImageError(
+          "Failed to calculate max image size:\n{}".format(output))
+    image_size = int(output)
+    if image_size <= 0:
+      raise BuildVerityImageError(
+          "Invalid max image size: {}".format(output))
+    self.image_size = image_size
+    return image_size
+
+  def PadSparseImage(self, out_file):
+    # No-op as the padding is taken care of by avbtool.
+    pass
+
+  def Build(self, out_file):
+    """Adds dm-verity hashtree and AVB metadata to an image.
+
+    Args:
+      out_file: Path to image to modify.
+    """
+    add_footer = ("add_hash_footer" if self.footer_type == self.AVB_HASH_FOOTER
+                  else "add_hashtree_footer")
+    cmd = [self.avbtool, add_footer,
+           "--partition_size", str(self.partition_size),
+           "--partition_name", self.partition_name,
+           "--image", out_file]
+    if self.key_path and self.algorithm:
+      cmd.extend(["--key", self.key_path, "--algorithm", self.algorithm])
+    if self.salt:
+      cmd.extend(["--salt", self.salt])
+    cmd.extend(shlex.split(self.signing_args))
+
+    proc = common.Run(cmd)
+    output, _ = proc.communicate()
+    if proc.returncode != 0:
+      raise BuildVerityImageError("Failed to add AVB footer: {}".format(output))
 
 
 class HashtreeInfoGenerationError(Exception):
@@ -415,7 +547,7 @@
 
     Arguments:
       partition_size: The whole size in bytes of a partition, including the
-        filesystem size, padding size, and verity size.
+          filesystem size, padding size, and verity size.
       block_size: Expected size in bytes of each block for the sparse image.
       fec_supported: True if the verity section contains fec data.
     """
@@ -429,6 +561,20 @@
     self.hashtree_size = None
     self.metadata_size = None
 
+    prop_dict = {
+        'partition_size': str(partition_size),
+        'verity': 'true',
+        'verity_fec': 'true' if fec_supported else None,
+        # 'verity_block_device' needs to be present to indicate a verity-enabled
+        # partition.
+        'verity_block_device': '',
+        # The following properties are only needed for signing the verity
+        # metadata, which is not done here, so they are left empty or unset.
+        'verity_key': '',
+        'verity_signer_cmd': None,
+    }
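+    # With these properties, CreateVerityImageBuilder() is expected to return
+    # a Verified Boot 1.0 builder (illustrative note, not part of the original
+    # change); DecomposeSparseImage() below uses it to compute the adjusted
+    # image size, replacing the old AdjustPartitionSizeForVerity() helper.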
+    self.verity_image_builder = CreateVerityImageBuilder(prop_dict)
+
     self.hashtree_info = HashtreeInfo()
 
   def DecomposeSparseImage(self, image):
@@ -445,8 +591,7 @@
         "partition size {} doesn't match with the calculated image size." \
         " total_blocks: {}".format(self.partition_size, image.total_blocks)
 
-    adjusted_size, _ = AdjustPartitionSizeForVerity(
-        self.partition_size, self.fec_supported)
+    adjusted_size = self.verity_image_builder.CalculateMaxImageSize()
     assert adjusted_size % self.block_size == 0
 
     verity_tree_size = GetVerityTreeSize(adjusted_size)
@@ -502,7 +647,7 @@
   def ValidateHashtree(self):
     """Checks that we can reconstruct the verity hash tree."""
 
-    # Writes the file system section to a temp file; and calls the executable
+    # Writes the filesystem section to a temp file; and calls the executable
     # build_verity_tree to construct the hash tree.
     adjusted_partition = common.MakeTempFile(prefix="adjusted_partition")
     with open(adjusted_partition, "wb") as fd: