Merge "Package the JDK into test suite zips"
diff --git a/Changes.md b/Changes.md
index 3109e9b..84c8d95 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,15 @@
 # Build System Changes for Android.mk Writers
 
+## `LOCAL_REQUIRED_MODULES` requires listed modules to exist {#BUILD_BROKEN_MISSING_REQUIRED_MODULES}
+
+Modules listed in `LOCAL_REQUIRED_MODULES`, `LOCAL_HOST_REQUIRED_MODULES`, and
+`LOCAL_TARGET_REQUIRED_MODULES` must exist unless `ALLOW_MISSING_DEPENDENCIES`
+is set.
+
+To temporarily relax the missing required modules check, use:
+
+`BUILD_BROKEN_MISSING_REQUIRED_MODULES := true`
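+
+For example, with the following (hypothetical) module definition, the build
+now fails if `libfoo_helper` does not exist anywhere in the source tree:
+
+```make
+LOCAL_PATH := $(call my-dir)
+include $(CLEAR_VARS)
+LOCAL_MODULE := foo
+LOCAL_SRC_FILES := foo.c
+# libfoo_helper must be a defined module unless one of the variables above is set.
+LOCAL_REQUIRED_MODULES := libfoo_helper
+include $(BUILD_EXECUTABLE)
+```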
+
 ## Changes in system properties settings
 
 ### Product variables
diff --git a/core/Makefile b/core/Makefile
index 0541f22..92723d3 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -553,12 +553,8 @@
 all_event_log_tags_src := \
     $(sort $(foreach m, $(ALL_MODULES), $(ALL_MODULES.$(m).EVENT_LOG_TAGS)))
 
-# PDK builds will already have a full list of tags that needs to get merged
-# in with the ones from source
-pdk_fusion_log_tags_file := $(patsubst $(PRODUCT_OUT)/%,$(_pdk_fusion_intermediates)/%,$(filter $(event_log_tags_file),$(ALL_PDK_FUSION_FILES)))
-
-$(all_event_log_tags_file): PRIVATE_SRC_FILES := $(all_event_log_tags_src) $(pdk_fusion_log_tags_file)
-$(all_event_log_tags_file): $(all_event_log_tags_src) $(pdk_fusion_log_tags_file) $(MERGETAGS) build/make/tools/event_log_tags.py
+$(all_event_log_tags_file): PRIVATE_SRC_FILES := $(all_event_log_tags_src)
+$(all_event_log_tags_file): $(all_event_log_tags_src) $(MERGETAGS) build/make/tools/event_log_tags.py
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(MERGETAGS) -o $@ $(PRIVATE_SRC_FILES)
 
@@ -572,9 +568,9 @@
       $(ALL_MODULES.$(m).EVENT_LOG_TAGS)) \
       $(filter-out vendor/% device/% out/%,$(all_event_log_tags_src)))
 
-$(event_log_tags_file): PRIVATE_SRC_FILES := $(event_log_tags_src) $(pdk_fusion_log_tags_file)
+$(event_log_tags_file): PRIVATE_SRC_FILES := $(event_log_tags_src)
 $(event_log_tags_file): PRIVATE_MERGED_FILE := $(all_event_log_tags_file)
-$(event_log_tags_file): $(event_log_tags_src) $(all_event_log_tags_file) $(pdk_fusion_log_tags_file) $(MERGETAGS) build/make/tools/event_log_tags.py
+$(event_log_tags_file): $(event_log_tags_src) $(all_event_log_tags_file) $(MERGETAGS) build/make/tools/event_log_tags.py
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(MERGETAGS) -o $@ -m $(PRIVATE_MERGED_FILE) $(PRIVATE_SRC_FILES)
 
@@ -955,7 +951,6 @@
 tools_notice_file_html := $(HOST_OUT_INTERMEDIATES)/NOTICE.html
 kernel_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/kernel.txt
 winpthreads_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/winpthreads.txt
-pdk_fusion_notice_files := $(filter $(TARGET_OUT_NOTICE_FILES)/%, $(ALL_PDK_FUSION_FILES))
 
 # Some targets get included under $(PRODUCT_OUT) for debug symbols or other
 # reasons--not to be flashed onto any device. Targets under these directories
@@ -972,7 +967,7 @@
 	        $(target_notice_file_html), \
 	        "Notices for files contained in the filesystem images in this directory:", \
 	        $(TARGET_OUT_NOTICE_FILES), \
-	        $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files), \
+	        $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file), \
 	        $(exclude_target_dirs)))
 $(target_notice_file_html_gz): $(target_notice_file_html) | $(MINIGZIP)
 	$(hide) $(MINIGZIP) -9 < $< > $@
@@ -1016,7 +1011,7 @@
 # Notice files are copied to TARGET_OUT_NOTICE_FILES as a side-effect of their module
 # being built. A notice xml file must depend on all modules that could potentially
 # install a license file relevant to it.
-license_modules := $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)
+license_modules := $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file)
 # Only files copied to a system image need system image notices.
 license_modules := $(filter $(PRODUCT_OUT)/%,$(license_modules))
 # Phonys/fakes don't have notice files (though their deps might)
@@ -2275,17 +2270,10 @@
 
 # -----------------------------------------------------------------
 # system image
-#
-# Remove overridden packages from $(ALL_PDK_FUSION_FILES)
-PDK_FUSION_SYSIMG_FILES := \
-    $(filter-out $(foreach p,$(overridden_packages),$(p) %/$(p).apk), \
-        $(ALL_PDK_FUSION_FILES))
 
 INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \
     $(ALL_GENERATED_SOURCES) \
-    $(ALL_DEFAULT_INSTALLED_MODULES) \
-    $(PDK_FUSION_SYSIMG_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP))
+    $(ALL_DEFAULT_INSTALLED_MODULES)))
 
 FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
 
@@ -2453,111 +2441,14 @@
 sync syncsys: $(INTERNAL_SYSTEMIMAGE_FILES)
 
 # -----------------------------------------------------------------
-## platform.zip: system, plus other files to be used in PDK fusion build,
-## in a zip file
-##
-## PDK_PLATFORM_ZIP_PRODUCT_BINARIES is used to store specified files to platform.zip.
-## The variable will be typically set from BoardConfig.mk.
-## Files under out dir will be rejected to prevent possible conflicts with other rules.
-ifneq (,$(BUILD_PLATFORM_ZIP))
-pdk_odex_javalibs := $(strip $(foreach m,$(DEXPREOPT.MODULES.JAVA_LIBRARIES),\
-  $(if $(filter $(DEXPREOPT.$(m).INSTALLED_STRIPPED),$(ALL_DEFAULT_INSTALLED_MODULES)),$(m))))
-pdk_odex_apps := $(strip $(foreach m,$(DEXPREOPT.MODULES.APPS),\
-  $(if $(filter $(DEXPREOPT.$(m).INSTALLED_STRIPPED),$(ALL_DEFAULT_INSTALLED_MODULES)),$(m))))
-pdk_classes_dex := $(strip \
-  $(foreach m,$(pdk_odex_javalibs),$(call intermediates-dir-for,JAVA_LIBRARIES,$(m),,COMMON)/javalib.jar) \
-  $(foreach m,$(pdk_odex_apps),$(call intermediates-dir-for,APPS,$(m))/package.apk))
-
-pdk_odex_config_mk := $(PRODUCT_OUT)/pdk_dexpreopt_config.mk
-$(pdk_odex_config_mk): PRIVATE_JAVA_LIBRARIES := $(pdk_odex_javalibs)
-$(pdk_odex_config_mk): PRIVATE_APPS := $(pdk_odex_apps)
-$(pdk_odex_config_mk) :
-	@echo "PDK odex config makefile: $@"
-	$(hide) mkdir -p $(dir $@)
-	$(hide) echo "# Auto-generated. Do not modify." > $@
-	$(hide) echo "PDK.DEXPREOPT.JAVA_LIBRARIES:=$(PRIVATE_JAVA_LIBRARIES)" >> $@
-	$(hide) echo "PDK.DEXPREOPT.APPS:=$(PRIVATE_APPS)" >> $@
-	$(foreach m,$(PRIVATE_JAVA_LIBRARIES),\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).SRC:=$(patsubst $(OUT_DIR)/%,%,$(call intermediates-dir-for,JAVA_LIBRARIES,$(m),,COMMON)/javalib.jar)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT:=$(DEXPREOPT.$(m).DEX_PREOPT)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).MULTILIB:=$(DEXPREOPT.$(m).MULTILIB)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT_FLAGS:=$(DEXPREOPT.$(m).DEX_PREOPT_FLAGS)" >> $@$(newline)\
-	  )
-	$(foreach m,$(PRIVATE_APPS),\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).SRC:=$(patsubst $(OUT_DIR)/%,%,$(call intermediates-dir-for,APPS,$(m))/package.apk)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT:=$(DEXPREOPT.$(m).DEX_PREOPT)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).MULTILIB:=$(DEXPREOPT.$(m).MULTILIB)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT_FLAGS:=$(DEXPREOPT.$(m).DEX_PREOPT_FLAGS)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).PRIVILEGED_MODULE:=$(DEXPREOPT.$(m).PRIVILEGED_MODULE)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).VENDOR_MODULE:=$(DEXPREOPT.$(m).VENDOR_MODULE)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).TARGET_ARCH:=$(DEXPREOPT.$(m).TARGET_ARCH)" >> $@$(newline)\
-	  $(hide) echo "PDK.DEXPREOPT.$(m).STRIPPED_SRC:=$(patsubst $(PRODUCT_OUT)/%,%,$(DEXPREOPT.$(m).INSTALLED_STRIPPED))" >> $@$(newline)\
-	  )
-
-PDK_PLATFORM_ZIP_PRODUCT_BINARIES := $(filter-out $(OUT_DIR)/%,$(PDK_PLATFORM_ZIP_PRODUCT_BINARIES))
-INSTALLED_PLATFORM_ZIP := $(PRODUCT_OUT)/platform.zip
-
-$(INSTALLED_PLATFORM_ZIP): PRIVATE_DEX_FILES := $(pdk_classes_dex)
-$(INSTALLED_PLATFORM_ZIP): PRIVATE_ODEX_CONFIG := $(pdk_odex_config_mk)
-$(INSTALLED_PLATFORM_ZIP) : $(SOONG_ZIP)
-# dependencies for the other partitions are defined below after their file lists
-# are known
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_SYSTEMIMAGE_FILES) $(pdk_classes_dex) $(pdk_odex_config_mk) $(API_FINGERPRINT)
-	$(call pretty,"Platform zip package: $(INSTALLED_PLATFORM_ZIP)")
-	rm -f $@ $@.lst
-	echo "-C $(PRODUCT_OUT)" >> $@.lst
-	echo "-D $(TARGET_OUT)" >> $@.lst
-	echo "-D $(TARGET_OUT_NOTICE_FILES)" >> $@.lst
-	echo "$(addprefix -f $(TARGET_OUT_UNSTRIPPED)/,$(PDK_SYMBOL_FILES_LIST))" >> $@.lst
-ifdef BUILDING_VENDOR_IMAGE
-	echo "-D $(TARGET_OUT_VENDOR)" >> $@.lst
-endif
-ifdef BUILDING_PRODUCT_IMAGE
-	echo "-D $(TARGET_OUT_PRODUCT)" >> $@.lst
-endif
-ifdef BUILDING_SYSTEM_EXT_IMAGE
-	echo "-D $(TARGET_OUT_SYSTEM_EXT)" >> $@.lst
-endif
-ifdef BUILDING_ODM_IMAGE
-	echo "-D $(TARGET_OUT_ODM)" >> $@.lst
-endif
-ifdef BUILDING_VENDOR_DLKM_IMAGE
-	echo "-D $(TARGET_OUT_VENDOR_DLKM)" >> $@.lst
-endif
-ifdef BUILDING_ODM_DLKM_IMAGE
-	echo "-D $(TARGET_OUT_ODM_DLKM)" >> $@.lst
-endif
-ifneq ($(PDK_PLATFORM_JAVA_ZIP_CONTENTS),)
-	echo "-C $(OUT_DIR)" >> $@.lst
-	for f in $(filter-out $(PRIVATE_DEX_FILES),$(addprefix -f $(OUT_DIR)/,$(PDK_PLATFORM_JAVA_ZIP_CONTENTS))); do \
-	  if [ -e $$f ]; then \
-	    echo "-f $$f"; \
-	  fi \
-	done >> $@.lst
-endif
-ifneq ($(PDK_PLATFORM_ZIP_PRODUCT_BINARIES),)
-        echo "-C . $(addprefix -f ,$(PDK_PLATFORM_ZIP_PRODUCT_BINARIES))" >> $@.lst
-endif
-	@# Add dex-preopt files and config.
-	$(if $(PRIVATE_DEX_FILES),\
-	  echo "-C $(OUT_DIR) $(addprefix -f ,$(PRIVATE_DEX_FILES))") >> $@.lst
-	echo "-C $(dir $(API_FINGERPRINT)) -f $(API_FINGERPRINT)" >> $@.lst
-	touch $(PRODUCT_OUT)/pdk.mk
-	echo "-C $(PRODUCT_OUT) -f $(PRIVATE_ODEX_CONFIG) -f $(PRODUCT_OUT)/pdk.mk" >> $@.lst
-	$(SOONG_ZIP) --ignore_missing_files -o $@ @$@.lst
-
+# Old PDK fusion targets
 .PHONY: platform
-platform: $(INSTALLED_PLATFORM_ZIP)
+platform:
+	echo "Warning: 'platform' is obsolete"
 
 .PHONY: platform-java
-platform-java: platform
-
-# Dist the platform.zip
-ifneq (,$(filter platform platform-java, $(MAKECMDGOALS)))
-$(call dist-for-goals, platform platform-java, $(INSTALLED_PLATFORM_ZIP))
-endif
-
-endif # BUILD_PLATFORM_ZIP
+platform-java:
+	echo "Warning: 'platform-java' is obsolete"
 
 # -----------------------------------------------------------------
 # data partition image
@@ -2691,9 +2582,7 @@
 
 INTERNAL_SYSTEMOTHERIMAGE_FILES := \
     $(filter $(TARGET_OUT_SYSTEM_OTHER)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 # system_other dex files are installed as a side-effect of installing system image files
 INTERNAL_SYSTEMOTHERIMAGE_FILES += $(INTERNAL_SYSTEMIMAGE_FILES)
@@ -2756,12 +2645,7 @@
 ifdef BUILDING_VENDOR_IMAGE
 INTERNAL_VENDORIMAGE_FILES := \
     $(filter $(TARGET_OUT_VENDOR)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-
-# platform.zip depends on $(INTERNAL_VENDORIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_VENDORIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
 INSTALLED_FILES_JSON_VENDOR := $(INSTALLED_FILES_FILE_VENDOR:.txt=.json)
@@ -2889,12 +2773,7 @@
 ifdef BUILDING_PRODUCT_IMAGE
 INTERNAL_PRODUCTIMAGE_FILES := \
     $(filter $(TARGET_OUT_PRODUCT)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-
-# platform.zip depends on $(INTERNAL_PRODUCTIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_PRODUCTIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_PRODUCT := $(PRODUCT_OUT)/installed-files-product.txt
 INSTALLED_FILES_JSON_PRODUCT := $(INSTALLED_FILES_FILE_PRODUCT:.txt=.json)
@@ -2945,12 +2824,7 @@
 ifdef BUILDING_SYSTEM_EXT_IMAGE
 INTERNAL_SYSTEM_EXTIMAGE_FILES := \
     $(filter $(TARGET_OUT_SYSTEM_EXT)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-
-# platform.zip depends on $(INTERNAL_SYSTEM_EXTIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_SYSTEM_EXT := $(PRODUCT_OUT)/installed-files-system_ext.txt
 INSTALLED_FILES_JSON_SYSTEM_EXT := $(INSTALLED_FILES_FILE_SYSTEM_EXT:.txt=.json)
@@ -3003,11 +2877,7 @@
 ifdef BUILDING_ODM_IMAGE
 INTERNAL_ODMIMAGE_FILES := \
     $(filter $(TARGET_OUT_ODM)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-# platform.zip depends on $(INTERNAL_ODMIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_ODMIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_ODM := $(PRODUCT_OUT)/installed-files-odm.txt
 INSTALLED_FILES_JSON_ODM := $(INSTALLED_FILES_FILE_ODM:.txt=.json)
@@ -3058,11 +2928,7 @@
 ifdef BUILDING_VENDOR_DLKM_IMAGE
 INTERNAL_VENDOR_DLKMIMAGE_FILES := \
     $(filter $(TARGET_OUT_VENDOR_DLKM)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-# platform.zip depends on $(INTERNAL_VENDOR_DLKMIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_VENDOR_DLKM := $(PRODUCT_OUT)/installed-files-vendor_dlkm.txt
 INSTALLED_FILES_JSON_VENDOR_DLKM := $(INSTALLED_FILES_FILE_VENDOR_DLKM:.txt=.json)
@@ -3113,11 +2979,7 @@
 ifdef BUILDING_ODM_DLKM_IMAGE
 INTERNAL_ODM_DLKMIMAGE_FILES := \
     $(filter $(TARGET_OUT_ODM_DLKM)/%,\
-      $(ALL_DEFAULT_INSTALLED_MODULES)\
-      $(ALL_PDK_FUSION_FILES)) \
-    $(PDK_FUSION_SYMLINK_STAMP)
-# platform.zip depends on $(INTERNAL_ODM_DLKMIMAGE_FILES).
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_ODM_DLKMIMAGE_FILES)
+      $(ALL_DEFAULT_INSTALLED_MODULES))
 
 INSTALLED_FILES_FILE_ODM_DLKM := $(PRODUCT_OUT)/installed-files-odm_dlkm.txt
 INSTALLED_FILES_JSON_ODM_DLKM := $(INSTALLED_FILES_FILE_ODM_DLKM:.txt=.json)
@@ -3937,9 +3799,6 @@
   ifeq ($(TARGET_PRODUCT),sdk)
     build_ota_package := false
   endif
-  ifeq ($(TARGET_BUILD_PDK),true)
-    build_ota_package := false
-  endif
   ifneq ($(PRODUCT_BUILD_GENERIC_OTA_PACKAGE),true)
     ifneq ($(filter generic%,$(TARGET_DEVICE)),)
       build_ota_package := false
@@ -5311,13 +5170,6 @@
 	$(hide) zip -qjX $@ $(INTERNAL_EMULATOR_PACKAGE_FILES)
 
 endif
-# -----------------------------------------------------------------
-# Old PDK stuffs, retired
-# The pdk package (Platform Development Kit)
-
-#ifneq (,$(filter pdk,$(MAKECMDGOALS)))
-#  include development/pdk/Pdk.mk
-#endif
 
 
 # -----------------------------------------------------------------
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index 8fab9c6..254e09b 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -45,7 +45,7 @@
 my_min_sdk_version := $(call module-min-sdk-version)
 
 ifdef TARGET_BUILD_APPS
-  ifndef TARGET_BUILD_APPS_USE_PREBUILT_SDK
+  ifndef TARGET_BUILD_USE_PREBUILT_SDKS
     ifeq ($(my_target_sdk_version),$(PLATFORM_VERSION_CODENAME))
       ifdef UNBUNDLED_BUILD_TARGET_SDK_WITH_API_FINGERPRINT
         my_target_sdk_version := $(my_target_sdk_version).$$(cat $(API_FINGERPRINT))
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index ab574b3..5767996 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -45,7 +45,7 @@
 # We skip it for unbundled app builds where we cannot build veridex.
 module_run_appcompat :=
 ifeq (true,$(non_system_module))
-ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK)))  # ! unbundled app build
+ifeq (,$(TARGET_BUILD_APPS))  # ! unbundled app build
 ifneq ($(UNSAFE_DISABLE_HIDDENAPI_FLAGS),true)
   module_run_appcompat := true
 endif
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 3f93c2c..ddf736b 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -93,6 +93,20 @@
 $(call pretty-error,Only one of LOCAL_PROPRIETARY_MODULE[$(LOCAL_PROPRIETARY_MODULE)] and LOCAL_VENDOR_MODULE[$(LOCAL_VENDOR_MODULE)] may be set, or they must be equal)
 endif
 
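+# Select the image variant for this module; binary.mk uses it to look up
+# Soong's per-variant sanitized static library lists.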
+ifeq ($(LOCAL_HOST_MODULE),true)
+my_image_variant := host
+else ifeq ($(LOCAL_VENDOR_MODULE),true)
+my_image_variant := vendor
+else ifeq ($(LOCAL_OEM_MODULE),true)
+my_image_variant := vendor
+else ifeq ($(LOCAL_ODM_MODULE),true)
+my_image_variant := vendor
+else ifeq ($(LOCAL_PRODUCT_MODULE),true)
+my_image_variant := product
+else
+my_image_variant := core
+endif
+
 non_system_module := $(filter true, \
    $(LOCAL_PRODUCT_MODULE) \
    $(LOCAL_SYSTEM_EXT_MODULE) \
@@ -473,7 +487,9 @@
 
 ifndef $(_local_path_target)
   $(_local_path_target) := true
-  $(eval $(call my_path_comp,$(_local_path),$(_local_path_target)))
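+  # Skip paths that contain no directory separator.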
+  ifneq (,$(findstring /,$(_local_path)))
+    $(eval $(call my_path_comp,$(_local_path),$(_local_path_target)))
+  endif
 endif
 
 _local_path :=
diff --git a/core/binary.mk b/core/binary.mk
index be7dc27..0b03f6a 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -64,10 +64,64 @@
 my_export_c_include_deps := $(LOCAL_EXPORT_C_INCLUDE_DEPS)
 my_arflags :=
 
+# Disable clang-tidy if it is not found.
+ifeq ($(PATH_TO_CLANG_TIDY),)
+  my_tidy_enabled := false
+else
+  # If LOCAL_TIDY is not defined, use global WITH_TIDY
+  my_tidy_enabled := $(LOCAL_TIDY)
+  ifeq ($(my_tidy_enabled),)
+    my_tidy_enabled := $(WITH_TIDY)
+  endif
+endif
+
+# my_tidy_checks is empty if clang-tidy is disabled.
+my_tidy_checks :=
+my_tidy_flags :=
+ifneq (,$(filter 1 true,$(my_tidy_enabled)))
+  # Set up global default checks
+  my_tidy_checks := $(WITH_TIDY_CHECKS)
+  ifeq ($(my_tidy_checks),)
+    my_tidy_checks := $(call default_global_tidy_checks,$(LOCAL_PATH))
+  endif
+  # Append local clang-tidy checks.
+  ifneq ($(LOCAL_TIDY_CHECKS),)
+    my_tidy_checks := $(my_tidy_checks),$(LOCAL_TIDY_CHECKS)
+  endif
+  my_tidy_flags := $(strip $(WITH_TIDY_FLAGS) $(LOCAL_TIDY_FLAGS))
+  # If tidy flags are not specified, default to check all header files.
+  ifeq ($(my_tidy_flags),)
+    my_tidy_flags := $(call default_tidy_header_filter,$(LOCAL_PATH))
+  endif
+  # If clang-tidy is not enabled globally, add the -quiet flag.
+  ifeq (,$(filter 1 true,$(WITH_TIDY)))
+    my_tidy_flags += -quiet -extra-arg-before=-fno-caret-diagnostics
+  endif
+
+  ifneq ($(my_tidy_checks),)
+    # We might be using the static analyzer through clang-tidy.
+    # https://bugs.llvm.org/show_bug.cgi?id=32914
+    my_tidy_flags += -extra-arg-before=-D__clang_analyzer__
+
+    # A recent change in clang-tidy (r328258) enabled destructor inlining,
+    # which appears to cause a number of false positives. Until that's
+    # resolved, this turns off the effects of r328258.
+    # https://bugs.llvm.org/show_bug.cgi?id=37459
+    my_tidy_flags += -extra-arg-before=-Xclang
+    my_tidy_flags += -extra-arg-before=-analyzer-config
+    my_tidy_flags += -extra-arg-before=-Xclang
+    my_tidy_flags += -extra-arg-before=c++-temp-dtor-inlining=false
+  endif
+endif
+
+my_tidy_checks := $(subst $(space),,$(my_tidy_checks))
+
 # Configure the pool to use for clang rules.
 # If LOCAL_CC or LOCAL_CXX is set don't use goma or RBE.
+# If clang-tidy is being used, don't use the RBE pool (clang-tidy runs in
+# the same action and is not executed remotely).
 my_pool :=
-ifeq (,$(strip $(my_cc))$(strip $(my_cxx)))
+ifeq (,$(strip $(my_cc))$(strip $(my_cxx))$(strip $(my_tidy_checks)))
   my_pool := $(GOMA_OR_RBE_POOL)
 endif
 
@@ -102,6 +156,8 @@
 my_ndk_sysroot_lib :=
 my_api_level := 10000
 
+my_arch := $(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+
 ifneq ($(LOCAL_SDK_VERSION),)
   ifdef LOCAL_IS_HOST_MODULE
     $(error $(LOCAL_PATH): LOCAL_SDK_VERSION cannot be used in host module)
@@ -110,7 +166,6 @@
   # Make sure we've built the NDK.
   my_additional_dependencies += $(SOONG_OUT_DIR)/ndk_base.timestamp
 
-  my_arch := $(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
   ifneq (,$(filter arm64 x86_64,$(my_arch)))
     my_min_sdk_version := 21
   else
@@ -1060,37 +1115,39 @@
 endif
 
 ###################################################################
+## Convert to sanitized names where they exist.
+## These lists come from sanitizerStaticLibsMap; see
+## build/soong/cc/sanitize.go
+##
+## $(1): list of static dependencies
+## $(2): name of sanitizer (e.g. cfi, hwasan)
+##################################################################
+define use_soong_sanitized_static_libraries
+  $(foreach lib,$(1),$(if $(filter $(lib),\
+      $(SOONG_$(2)_$(my_image_variant)_$(my_arch)_STATIC_LIBRARIES)),\
+      $(lib).$(2),$(lib)))
+endef
+
+###################################################################
 ## When compiling a CFI enabled target, use the .cfi variant of any
 ## static dependencies (where they exist).
 ##################################################################
-define use_soong_cfi_static_libraries
-  $(foreach l,$(1),$(if $(filter $(l),$(SOONG_CFI_STATIC_LIBRARIES)),\
-      $(l).cfi,$(l)))
-endef
-
 ifneq ($(filter cfi,$(my_sanitize)),)
-  my_whole_static_libraries := $(call use_soong_cfi_static_libraries,\
-    $(my_whole_static_libraries))
-  my_static_libraries := $(call use_soong_cfi_static_libraries,\
-    $(my_static_libraries))
+  my_whole_static_libraries := $(call use_soong_sanitized_static_libraries,\
+    $(my_whole_static_libraries),cfi)
+  my_static_libraries := $(call use_soong_sanitized_static_libraries,\
+    $(my_static_libraries),cfi)
 endif
 
-ifneq ($(LOCAL_USE_VNDK),)
-  my_soong_hwasan_static_libraries := $(SOONG_HWASAN_VENDOR_STATIC_LIBRARIES)
-else
-  my_soong_hwasan_static_libraries = $(SOONG_HWASAN_STATIC_LIBRARIES)
-endif
-
-define use_soong_hwasan_static_libraries
-  $(foreach l,$(1),$(if $(filter $(l),$(my_soong_hwasan_static_libraries)),\
-      $(l).hwasan,$(l)))
-endef
-
+###################################################################
+## When compiling a hwasan enabled target, use the .hwasan variant
+## of any static dependencies (where they exist).
+##################################################################
 ifneq ($(filter hwaddress,$(my_sanitize)),)
-  my_whole_static_libraries := $(call use_soong_hwasan_static_libraries,\
-    $(my_whole_static_libraries))
-  my_static_libraries := $(call use_soong_hwasan_static_libraries,\
-    $(my_static_libraries))
+  my_whole_static_libraries := $(call use_soong_sanitized_static_libraries,\
+    $(my_whole_static_libraries),hwasan)
+  my_static_libraries := $(call use_soong_sanitized_static_libraries,\
+    $(my_static_libraries),hwasan)
 endif
 
 ###########################################################
@@ -1480,61 +1537,10 @@
   endif
 endif
 
-# Disable clang-tidy if it is not found.
-ifeq ($(PATH_TO_CLANG_TIDY),)
-  my_tidy_enabled := false
-else
-  # If LOCAL_TIDY is not defined, use global WITH_TIDY
-  my_tidy_enabled := $(LOCAL_TIDY)
-  ifeq ($(my_tidy_enabled),)
-    my_tidy_enabled := $(WITH_TIDY)
-  endif
-endif
-
-# my_tidy_checks is empty if clang-tidy is disabled.
-my_tidy_checks :=
-my_tidy_flags :=
-ifneq (,$(filter 1 true,$(my_tidy_enabled)))
-  tidy_only: $(cpp_objects) $(c_objects) $(gen_c_objects) $(gen_cpp_objects)
-  # Set up global default checks
-  my_tidy_checks := $(WITH_TIDY_CHECKS)
-  ifeq ($(my_tidy_checks),)
-    my_tidy_checks := $(call default_global_tidy_checks,$(LOCAL_PATH))
-  endif
-  # Append local clang-tidy checks.
-  ifneq ($(LOCAL_TIDY_CHECKS),)
-    my_tidy_checks := $(my_tidy_checks),$(LOCAL_TIDY_CHECKS)
-  endif
-  my_tidy_flags := $(strip $(WITH_TIDY_FLAGS) $(LOCAL_TIDY_FLAGS))
-  # If tidy flags are not specified, default to check all header files.
-  ifeq ($(my_tidy_flags),)
-    my_tidy_flags := $(call default_tidy_header_filter,$(LOCAL_PATH))
-  endif
-  # If clang-tidy is not enabled globally, add the -quiet flag.
-  ifeq (,$(filter 1 true,$(WITH_TIDY)))
-    my_tidy_flags += -quiet -extra-arg-before=-fno-caret-diagnostics
-  endif
-
-  ifneq ($(my_tidy_checks),)
-    # We might be using the static analyzer through clang-tidy.
-    # https://bugs.llvm.org/show_bug.cgi?id=32914
-    my_tidy_flags += -extra-arg-before=-D__clang_analyzer__
-
-    # A recent change in clang-tidy (r328258) enabled destructor inlining,
-    # which appears to cause a number of false positives. Until that's
-    # resolved, this turns off the effects of r328258.
-    # https://bugs.llvm.org/show_bug.cgi?id=37459
-    my_tidy_flags += -extra-arg-before=-Xclang
-    my_tidy_flags += -extra-arg-before=-analyzer-config
-    my_tidy_flags += -extra-arg-before=-Xclang
-    my_tidy_flags += -extra-arg-before=c++-temp-dtor-inlining=false
-  endif
-endif
-
-my_tidy_checks := $(subst $(space),,$(my_tidy_checks))
-
-# Add dependency of clang-tidy and clang-tidy.sh
 ifneq ($(my_tidy_checks),)
+  tidy_only: $(cpp_objects) $(c_objects) $(gen_c_objects) $(gen_cpp_objects)
+
+  # Add dependency of clang-tidy and clang-tidy.sh
   $(cpp_objects): $(intermediates)/%.o: $(PATH_TO_CLANG_TIDY)
   $(c_objects): $(intermediates)/%.o: $(PATH_TO_CLANG_TIDY)
   $(gen_cpp_objects): $(intermediates)/%.o: $(PATH_TO_CLANG_TIDY)
diff --git a/core/board_config.mk b/core/board_config.mk
index d4fe618..b7d0178 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -91,15 +91,21 @@
 
 _board_strip_readonly_list += $(_dynamic_partitions_var_list)
 
+# Kernel related variables
+_board_strip_readonly_list += \
+  BOARD_KERNEL_BINARIES \
+  BOARD_KERNEL_MODULE_INTERFACE_VERSIONS \
+
 _build_broken_var_list := \
   BUILD_BROKEN_DUP_RULES \
+  BUILD_BROKEN_DUP_SYSPROP \
   BUILD_BROKEN_ELF_PREBUILT_PRODUCT_COPY_FILES \
+  BUILD_BROKEN_MISSING_REQUIRED_MODULES \
   BUILD_BROKEN_OUTSIDE_INCLUDE_DIRS \
   BUILD_BROKEN_PREBUILT_ELF_FILES \
   BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW \
   BUILD_BROKEN_USES_NETWORK \
   BUILD_BROKEN_VINTF_PRODUCT_COPY_FILES \
-  BUILD_BROKEN_DUP_SYSPROP \
 
 _build_broken_var_list += \
   $(foreach m,$(AVAILABLE_BUILD_MODULE_TYPES) \
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 7f420a7..307c2c2 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -258,7 +258,6 @@
 LOCAL_SANITIZE_DIAG:=
 LOCAL_SANITIZE_RECOVER:=
 LOCAL_SANITIZE_NO_RECOVER:=
-LOCAL_SANITIZE_BLACKLIST :=
 LOCAL_SANITIZE_BLOCKLIST :=
 LOCAL_SDK_LIBRARIES :=
 LOCAL_SDK_RES_VERSION:=
diff --git a/core/config.mk b/core/config.mk
index a35b718..57296d8 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -154,6 +154,7 @@
 $(KATI_obsolete_var COVERAGE_PATHS,Use NATIVE_COVERAGE_PATHS instead)
 $(KATI_obsolete_var COVERAGE_EXCLUDE_PATHS,Use NATIVE_COVERAGE_EXCLUDE_PATHS instead)
 $(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported.)
+$(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead.)
 
 # Used to force goals to build.  Only use for conditionally defined goals.
 .PHONY: FORCE
@@ -428,93 +429,13 @@
 endif
 endif
 
-# Set up PDK so we can use TARGET_BUILD_PDK to select prebuilt tools below
-.PHONY: pdk fusion
-pdk fusion: $(DEFAULT_GOAL)
-
-# What to build:
-# pdk fusion if:
-# 1) PDK_FUSION_PLATFORM_ZIP / PDK_FUSION_PLATFORM_DIR is passed in from the environment
-# or
-# 2) the platform.zip / pdk.mk exists in the default location
-# or
-# 3) fusion is a command line build goal,
-#    PDK_FUSION_PLATFORM_ZIP is needed anyway, then do we need the 'fusion' goal?
-# otherwise pdk only if:
-# 1) pdk is a command line build goal
-# or
-# 2) TARGET_BUILD_PDK is passed in from the environment
-
-# if PDK_FUSION_PLATFORM_ZIP or PDK_FUSION_PLATFORM_DIR is specified, do not override.
-ifeq (,$(strip $(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR)))
-  # Most PDK project paths should be using vendor/pdk/TARGET_DEVICE
-  # but some legacy ones (e.g. mini_armv7a_neon generic PDK) were setup
-  # with vendor/pdk/TARGET_PRODUCT.
-  # Others are set up with vendor/pdk/TARGET_DEVICE/TARGET_DEVICE-userdebug
-  _pdk_fusion_search_paths := \
-    vendor/pdk/$(TARGET_DEVICE)/$(TARGET_DEVICE)-$(TARGET_BUILD_VARIANT)/platform \
-    vendor/pdk/$(TARGET_DEVICE)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform \
-    vendor/pdk/$(TARGET_DEVICE)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform \
-    vendor/pdk/$(TARGET_PRODUCT)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform \
-    vendor/pdk/$(TARGET_PRODUCT)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform
-
-  _pdk_fusion_default_platform_zip := $(strip $(foreach p,$(_pdk_fusion_search_paths),$(wildcard $(p)/platform.zip)))
-  ifneq (,$(_pdk_fusion_default_platform_zip))
-    PDK_FUSION_PLATFORM_ZIP := $(word 1, $(_pdk_fusion_default_platform_zip))
-    _pdk_fusion_default_platform_zip :=
-  else
-    _pdk_fusion_default_platform_mk := $(strip $(foreach p,$(_pdk_fusion_search_paths),$(wildcard $(p)/pdk.mk)))
-    ifneq (,$(_pdk_fusion_default_platform_mk))
-      PDK_FUSION_PLATFORM_DIR := $(dir $(word 1,$(_pdk_fusion_default_platform_mk)))
-      _pdk_fusion_default_platform_mk :=
-    endif
-  endif # _pdk_fusion_default_platform_zip
-  _pdk_fusion_search_paths :=
-endif # !PDK_FUSION_PLATFORM_ZIP && !PDK_FUSION_PLATFORM_DIR
-
-ifneq (,$(PDK_FUSION_PLATFORM_ZIP))
-  ifneq (,$(PDK_FUSION_PLATFORM_DIR))
-    $(error Only one of PDK_FUSION_PLATFORM_ZIP or PDK_FUSION_PLATFORM_DIR may be specified)
-  endif
-endif
-
-ifneq (,$(filter pdk fusion, $(MAKECMDGOALS)))
-TARGET_BUILD_PDK := true
-ifneq (,$(filter fusion, $(MAKECMDGOALS)))
-ifeq (,$(strip $(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR)))
-  $(error Specify PDK_FUSION_PLATFORM_ZIP or PDK_FUSION_PLATFORM_DIR to do a PDK fusion.)
-endif
-endif  # fusion
-endif  # pdk or fusion
-
-ifdef PDK_FUSION_PLATFORM_ZIP
-TARGET_BUILD_PDK := true
-ifeq (,$(wildcard $(PDK_FUSION_PLATFORM_ZIP)))
-  ifneq (,$(wildcard $(patsubst %.zip,%,$(PDK_FUSION_PLATFORM_ZIP))/pdk.mk))
-    PDK_FUSION_PLATFORM_DIR := $(patsubst %.zip,%,$(PDK_FUSION_PLATFORM_ZIP))
-    PDK_FUSION_PLATFORM_ZIP :=
-  else
-    $(error Cannot find file $(PDK_FUSION_PLATFORM_ZIP).)
-  endif
-endif
-endif
-
-ifdef PDK_FUSION_PLATFORM_DIR
-TARGET_BUILD_PDK := true
-ifeq (,$(wildcard $(PDK_FUSION_PLATFORM_DIR)/pdk.mk))
-  $(error Cannot find file $(PDK_FUSION_PLATFORM_DIR)/pdk.mk.)
-endif
-endif
-
-BUILD_PLATFORM_ZIP := $(filter platform platform-java,$(MAKECMDGOALS))
-
 # ---------------------------------------------------------------
 # Whether we can expect a full build graph
 ALLOW_MISSING_DEPENDENCIES := $(filter true,$(ALLOW_MISSING_DEPENDENCIES))
 ifneq ($(TARGET_BUILD_APPS),)
 ALLOW_MISSING_DEPENDENCIES := true
 endif
-ifeq ($(TARGET_BUILD_PDK),true)
+ifeq ($(TARGET_BUILD_UNBUNDLED_IMAGE),true)
 ALLOW_MISSING_DEPENDENCIES := true
 endif
 ifneq ($(filter true,$(SOONG_ALLOW_MISSING_DEPENDENCIES)),)
@@ -522,13 +443,19 @@
 endif
 .KATI_READONLY := ALLOW_MISSING_DEPENDENCIES
 
-TARGET_BUILD_APPS_USE_PREBUILT_SDK :=
-ifdef TARGET_BUILD_APPS
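+# Unbundled builds (apps, or a single unbundled image) disable dex preopt
+# and default to the prebuilt SDKs unless UNBUNDLED_BUILD_SDKS_FROM_SOURCE
+# is set.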
+TARGET_BUILD_USE_PREBUILT_SDKS :=
+DISABLE_PREOPT :=
+ifneq (,$(TARGET_BUILD_APPS)$(TARGET_BUILD_UNBUNDLED_IMAGE))
+  DISABLE_PREOPT := true
   ifndef UNBUNDLED_BUILD_SDKS_FROM_SOURCE
-    TARGET_BUILD_APPS_USE_PREBUILT_SDK := true
+    TARGET_BUILD_USE_PREBUILT_SDKS := true
   endif
 endif
 
+.KATI_READONLY := \
+  TARGET_BUILD_USE_PREBUILT_SDKS \
+  DISABLE_PREOPT \
+
 prebuilt_sdk_tools := prebuilts/sdk/tools
 prebuilt_sdk_tools_bin := $(prebuilt_sdk_tools)/$(HOST_OS)/bin
 
@@ -550,25 +477,25 @@
 .KATI_READONLY := USE_D8
 
 #
-# Tools that are prebuilts for TARGET_BUILD_APPS
+# Tools that are prebuilts for TARGET_BUILD_USE_PREBUILT_SDKS
 #
-ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK)))
+ifeq (,$(TARGET_BUILD_USE_PREBUILT_SDKS))
   AAPT := $(HOST_OUT_EXECUTABLES)/aapt
   MAINDEXCLASSES := $(HOST_OUT_EXECUTABLES)/mainDexClasses
 
-else # TARGET_BUILD_APPS || TARGET_BUILD_PDK
+else # TARGET_BUILD_USE_PREBUILT_SDKS
   AAPT := $(prebuilt_sdk_tools_bin)/aapt
   MAINDEXCLASSES := $(prebuilt_sdk_tools)/mainDexClasses
-endif # TARGET_BUILD_APPS || TARGET_BUILD_PDK
+endif # TARGET_BUILD_USE_PREBUILT_SDKS
 
-ifeq (,$(TARGET_BUILD_APPS))
-  # Use RenderScript prebuilts for unbundled builds but not PDK builds
+ifeq (,$(TARGET_BUILD_USE_PREBUILT_SDKS))
+  # Use RenderScript prebuilts for unbundled builds
   LLVM_RS_CC := $(HOST_OUT_EXECUTABLES)/llvm-rs-cc
   BCC_COMPAT := $(HOST_OUT_EXECUTABLES)/bcc_compat
 else
   LLVM_RS_CC := $(prebuilt_sdk_tools_bin)/llvm-rs-cc
   BCC_COMPAT := $(prebuilt_sdk_tools_bin)/bcc_compat
-endif # TARGET_BUILD_PDK
+endif
 
 prebuilt_sdk_tools :=
 prebuilt_sdk_tools_bin :=
@@ -588,16 +515,8 @@
 # ---------------------------------------------------------------
 # Generic tools.
 
-LEX := $(prebuilt_build_tools_bin_noasan)/flex
-# The default PKGDATADIR built in the prebuilt bison is a relative path
-# prebuilts/build-tools/common/bison.
-# To run bison from elsewhere you need to set up enviromental variable
-# BISON_PKGDATADIR.
-BISON_PKGDATADIR := $(prebuilt_build_tools)/common/bison
-BISON := $(prebuilt_build_tools_bin_noasan)/bison
-YACC := $(BISON) -d
-BISON_DATA := $(wildcard $(BISON_PKGDATADIR)/* $(BISON_PKGDATADIR)/*/*)
-M4 :=$= $(prebuilt_build_tools_bin_noasan)/m4
+# These dependencies are now handled via dependencies on prebuilt_build_tool
+BISON_DATA :=$=
 
 YASM := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/yasm/yasm
 
@@ -1109,7 +1028,7 @@
 HISTORICAL_NDK_VERSIONS_ROOT := $(TOPDIR)prebuilts/ndk
 
 # The path where app can reference the support library resources.
-ifdef TARGET_BUILD_APPS
+ifdef TARGET_BUILD_USE_PREBUILT_SDKS
 SUPPORT_LIBRARY_ROOT := $(HISTORICAL_SDK_VERSIONS_ROOT)/current/support
 else
 SUPPORT_LIBRARY_ROOT := frameworks/support
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 323bb36..eaab1b5 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -144,9 +144,6 @@
 
 # Support for local sanitize blacklist paths.
 ifneq ($(my_sanitize)$(my_global_sanitize),)
-  ifneq ($(LOCAL_SANITIZE_BLACKLIST),)
-    my_cflags += -fsanitize-blacklist=$(LOCAL_PATH)/$(LOCAL_SANITIZE_BLACKLIST)
-  endif
   ifneq ($(LOCAL_SANITIZE_BLOCKLIST),)
     my_cflags += -fsanitize-blacklist=$(LOCAL_PATH)/$(LOCAL_SANITIZE_BLOCKLIST)
   endif
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index 55f6f0b..41a2be9 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -5,7 +5,7 @@
   ENABLE_PREOPT :=
 else ifneq (true,$(filter true,$(PRODUCT_USES_DEFAULT_ART_CONFIG)))
   ENABLE_PREOPT :=
-else ifneq (,$(TARGET_BUILD_APPS))
+else ifeq (true,$(DISABLE_PREOPT))
   ENABLE_PREOPT :=
 endif
 
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 3d5f68a..799b623 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -40,8 +40,8 @@
   LOCAL_DEX_PREOPT :=
 endif
 
-# Disable preopt for TARGET_BUILD_APPS
-ifneq (,$(TARGET_BUILD_APPS))
+# Disable preopt for DISABLE_PREOPT
+ifeq (true,$(DISABLE_PREOPT))
   LOCAL_DEX_PREOPT :=
 endif
 
diff --git a/core/distdir.mk b/core/distdir.mk
index 5f40407..aad8ff3 100644
--- a/core/distdir.mk
+++ b/core/distdir.mk
@@ -28,7 +28,7 @@
 # certain files with certain goals.  When those goals are built
 # and "dist" is specified, the marked files will be copied to DIST_DIR.
 #
-# $(1): a list of goals  (e.g. droid, sdk, pdk, ndk). These must be PHONY
+# $(1): a list of goals  (e.g. droid, sdk, ndk). These must be PHONY
 # $(2): the dist files to add to those goals.  If the file contains ':',
 #       the text following the colon is the name that the file is copied
 #       to under the dist directory.  Subdirs are ok, and will be created
diff --git a/core/envsetup.mk b/core/envsetup.mk
index f78ecb4..167fed9 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -93,6 +93,7 @@
 endif
 
 TARGET_BUILD_APPS ?=
+TARGET_BUILD_UNBUNDLED_IMAGE ?=
 
 # Set to true for an unbundled build, i.e. a build without
 # support for platform targets like the system image. This also
@@ -107,11 +108,19 @@
   TARGET_BUILD_UNBUNDLED := true
 endif
 
+# TARGET_BUILD_UNBUNDLED_IMAGE also implies an unbundled build
+# (i.e. it targets only an unbundled image, such as the vendor image
+# or the product image).
+ifneq ($(TARGET_BUILD_UNBUNDLED_IMAGE),)
+  TARGET_BUILD_UNBUNDLED := true
+endif
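+# (For example, a build that only produces the vendor image can set
+# TARGET_BUILD_UNBUNDLED_IMAGE := true; the exact invocation is
+# product-specific.)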
+
 .KATI_READONLY := \
   TARGET_PRODUCT \
   TARGET_BUILD_VARIANT \
   TARGET_BUILD_APPS \
   TARGET_BUILD_UNBUNDLED \
+  TARGET_BUILD_UNBUNDLED_IMAGE \
 
 # ---------------------------------------------------------------
 # Set up configuration for host machine.  We don't do cross-
diff --git a/core/java.mk b/core/java.mk
index 2f18ad9..5fe8da5 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -4,17 +4,6 @@
 # LOCAL_MODULE_CLASS
 # all_res_assets
 
-ifeq ($(TARGET_BUILD_PDK),true)
-ifeq ($(TARGET_BUILD_PDK_JAVA_PLATFORM),)
-# LOCAL_SDK not defined or set to current
-ifeq ($(filter-out current,$(LOCAL_SDK_VERSION)),)
-ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-LOCAL_SDK_VERSION := $(PDK_BUILD_SDK_VERSION)
-endif #!LOCAL_NO_STANDARD_LIBRARIES
-endif
-endif # !PDK_JAVA
-endif #PDK
-
 LOCAL_NO_STANDARD_LIBRARIES:=$(strip $(LOCAL_NO_STANDARD_LIBRARIES))
 LOCAL_SDK_VERSION:=$(strip $(LOCAL_SDK_VERSION))
 
@@ -106,8 +95,8 @@
 
 aidl_preprocess_import :=
 ifdef LOCAL_SDK_VERSION
-ifneq ($(filter current system_current test_current core_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS_USE_PREBUILT_SDK)),)
-  # LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS
+ifneq ($(filter current system_current test_current core_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_USE_PREBUILT_SDKS)),)
+  # LOCAL_SDK_VERSION is current and no TARGET_BUILD_USE_PREBUILT_SDKS
   aidl_preprocess_import := $(FRAMEWORK_AIDL)
 else
   aidl_preprocess_import := $(call resolve-prebuilt-sdk-aidl-path,$(LOCAL_SDK_VERSION))
diff --git a/core/java_common.mk b/core/java_common.mk
index b7f2883..1798ca8 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -25,7 +25,7 @@
     LOCAL_JAVA_LANGUAGE_VERSION := 1.7
   else ifneq (,$(filter $(LOCAL_SDK_VERSION), $(TARGET_SDK_VERSIONS_WITHOUT_JAVA_19_SUPPORT)))
     LOCAL_JAVA_LANGUAGE_VERSION := 1.8
-  else ifneq (,$(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS_USE_PREBUILT_SDK))
+  else ifneq (,$(LOCAL_SDK_VERSION)$(TARGET_BUILD_USE_PREBUILT_SDKS))
     # TODO(ccross): allow 1.9 for current and unbundled once we have SDK system modules
     LOCAL_JAVA_LANGUAGE_VERSION := 1.8
   else
@@ -268,7 +268,7 @@
       my_system_modules := $(LEGACY_CORE_PLATFORM_SYSTEM_MODULES)
     endif  # LOCAL_NO_STANDARD_LIBRARIES
 
-    ifneq (,$(TARGET_BUILD_APPS_USE_PREBUILT_SDK))
+    ifneq (,$(TARGET_BUILD_USE_PREBUILT_SDKS))
       sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(call resolve-prebuilt-sdk-module,system_current,$(lib_name)))
     else
       # When SDK libraries are referenced from modules built without SDK, provide the all APIs to them
@@ -283,8 +283,8 @@
              Choices are: $(TARGET_AVAILABLE_SDK_VERSIONS))
     endif
 
-    ifneq (,$(TARGET_BUILD_APPS_USE_PREBUILT_SDK)$(filter-out %current,$(LOCAL_SDK_VERSION)))
-      # TARGET_BUILD_APPS mode or numbered SDK. Use prebuilt modules.
+    ifneq (,$(TARGET_BUILD_USE_PREBUILT_SDKS)$(filter-out %current,$(LOCAL_SDK_VERSION)))
+      # TARGET_BUILD_USE_PREBUILT_SDKS mode or numbered SDK. Use prebuilt modules.
       sdk_module := $(call resolve-prebuilt-sdk-module,$(LOCAL_SDK_VERSION))
       sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(call resolve-prebuilt-sdk-module,$(LOCAL_SDK_VERSION),$(lib_name)))
     else
@@ -325,7 +325,7 @@
   # related classes to be present. This change adds stubs needed for
   # javac to compile lambdas.
   ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-    ifdef TARGET_BUILD_APPS_USE_PREBUILT_SDK
+    ifdef TARGET_BUILD_USE_PREBUILT_SDKS
       full_java_bootclasspath_libs += $(call java-lib-header-files,sdk-core-lambda-stubs)
     else
       full_java_bootclasspath_libs += $(call java-lib-header-files,core-lambda-stubs)
diff --git a/core/java_prebuilt_internal.mk b/core/java_prebuilt_internal.mk
index 6ee1ae1..279b0e4 100644
--- a/core/java_prebuilt_internal.mk
+++ b/core/java_prebuilt_internal.mk
@@ -173,7 +173,7 @@
 framework_res_package_export :=
 # Please refer to package.mk
 ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_USE_PREBUILT_SDKS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 framework_res_package_export := \
     $(call resolve-prebuilt-sdk-jar-path,$(LOCAL_SDK_RES_VERSION))
 else
diff --git a/core/java_renderscript.mk b/core/java_renderscript.mk
index bfcf59e..572d6e4 100644
--- a/core/java_renderscript.mk
+++ b/core/java_renderscript.mk
@@ -50,8 +50,8 @@
 renderscript_flags += $(LOCAL_RENDERSCRIPT_FLAGS)
 
 # prepend the RenderScript system include path
-ifneq ($(filter-out current system_current test_current core_current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_VERSION))),)
-# if a numeric LOCAL_SDK_VERSION, or current LOCAL_SDK_VERSION with TARGET_BUILD_APPS
+ifneq ($(filter-out current system_current test_current core_current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_USE_PREBUILT_SDKS),$(filter current system_current test_current,$(LOCAL_SDK_VERSION))),)
+# if a numeric LOCAL_SDK_VERSION, or current LOCAL_SDK_VERSION with TARGET_BUILD_USE_PREBUILT_SDKS
 LOCAL_RENDERSCRIPT_INCLUDES := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/renderscript/clang-include \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/renderscript/include \
@@ -110,7 +110,7 @@
 rs_jni_lib := $(call intermediates-dir-for,SHARED_LIBRARIES,librsjni.so)/librsjni.so
 LOCAL_JNI_SHARED_LIBRARIES += librsjni
 
-ifneq (,$(TARGET_BUILD_APPS)$(FORCE_BUILD_RS_COMPAT))
+ifneq (,$(TARGET_BUILD_USE_PREBUILT_SDKS)$(FORCE_BUILD_RS_COMPAT))
 
 rs_compatibility_jni_libs := $(addprefix \
     $(renderscript_intermediate)/librs., \
diff --git a/core/main.mk b/core/main.mk
index 4578d90..a3d594b 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -33,6 +33,8 @@
 
 include $(SOONG_MAKEVARS_MK)
 
+YACC :=$= $(BISON) -d
+
 include $(BUILD_SYSTEM)/clang/config.mk
 
 # Write the build number to a file so it can be read back in
@@ -175,16 +177,8 @@
 endif
 
 # -----------------------------------------------------------------
-# Variable to check java support level inside PDK build.
-# Not necessary if the components is not in PDK.
-# not defined : not supported
-# "sdk" : sdk API only
-# "platform" : platform API supproted
-TARGET_BUILD_JAVA_SUPPORT_LEVEL := platform
-
-# -----------------------------------------------------------------
-# The pdk (Platform Development Kit) build
-include build/make/core/pdk_config.mk
+# PDK builds are no longer supported; this is always platform.
+TARGET_BUILD_JAVA_SUPPORT_LEVEL :=$= platform
 
 # -----------------------------------------------------------------
 
@@ -213,6 +207,9 @@
 ADDITIONAL_SYSTEM_PROPERTIES += persist.debug.dalvik.vm.core_platform_api_policy=just-warn
 endif
 
+# Define ro.sanitize.<name> properties for all global sanitizers.
+ADDITIONAL_SYSTEM_PROPERTIES += $(foreach s,$(SANITIZE_TARGET),ro.sanitize.$(s)=true)
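+# e.g. SANITIZE_TARGET := hwaddress results in ro.sanitize.hwaddress=true.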
+
 # Sets the default value of ro.postinstall.fstab.prefix to /system.
 # Device board config should override the value to /product when needed by:
 #
@@ -513,11 +510,6 @@
 
 $(foreach mk,$(subdir_makefiles),$(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] including $(mk) ...)$(eval include $(mk)))
 
-ifneq (,$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR))
-# Bring in the PDK platform.zip modules.
-include $(BUILD_SYSTEM)/pdk_fusion_modules.mk
-endif # PDK_FUSION_PLATFORM_ZIP || PDK_FUSION_PLATFORM_DIR
-
 droid_targets : blueprint_tools
 
 endif # dont_bother
@@ -633,7 +625,6 @@
 )
 endef
 
-# TODO(b/7456955): error if a required module doesn't exist.
 # Resolve the required module names to 32-bit or 64-bit variant for:
 #   ALL_MODULES.<*>.REQUIRED_FROM_TARGET
 #   ALL_MODULES.<*>.REQUIRED_FROM_HOST
@@ -666,7 +657,8 @@
             $(if $(and $(module_is_native),$(required_is_shared_library_or_native_test)), \
               $(if $(ALL_MODULES.$(m).FOR_2ND_ARCH),$(r_i_2nd),$(r_i)), \
               $(r_i) $(r_i_2nd)))) \
-        $(eval ### TODO(b/7456955): error if r_m is empty / does not exist) \
+        $(eval r_m := $(foreach r_j,$(r_m),$(if $(ALL_MODULES.$(r_j).PATH),$(r_j)))) \
+        $(if $(r_m),,$(eval _nonexistent_required += $(1)$(comma)$(m)$(comma)$(1)$(comma)$(r_i))) \
         $(r_m))) \
     $(eval ALL_MODULES.$(m).REQUIRED_FROM_$(1) := $(sort $(r_r))) \
   ) \
@@ -689,18 +681,37 @@
     $(eval r_r := \
       $(foreach r_i,$(r), \
         $(eval r_m := $(call resolve-bitness-for-modules,$(1),$(r_i))) \
-        $(eval ### TODO(b/7456955): error if r_m is empty / does not exist) \
+        $(eval r_m := $(foreach r_j,$(r_m),$(if $(ALL_MODULES.$(r_j).PATH),$(r_j)))) \
+        $(if $(r_m),,$(eval _nonexistent_required += $(2)$(comma)$(m)$(comma)$(1)$(comma)$(r_i))) \
         $(r_m))) \
     $(eval ALL_MODULES.$(m).$(1)_REQUIRED_FROM_$(2) := $(sort $(r_r))) \
   ) \
 )
 endef
 
+_nonexistent_required :=
 $(call select-bitness-of-required-modules,TARGET)
 $(call select-bitness-of-required-modules,HOST)
 $(call select-bitness-of-required-modules,HOST_CROSS)
 $(call select-bitness-of-target-host-required-modules,TARGET,HOST)
 $(call select-bitness-of-target-host-required-modules,HOST,TARGET)
+_nonexistent_required := $(sort $(_nonexistent_required))
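+# Each entry is "<module side>,<module>,<required side>,<required module>";
+# e.g. "HOST,foo,TARGET,libbar" (hypothetical names) means host module foo
+# requires a nonexistent target module libbar.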
+
+# The darwin host build is broken; disable this check on darwin for now.
+# TODO(b/162102724): Remove this
+ifeq (,$(filter $(HOST_OS),darwin))
+ifeq (,$(filter true,$(ALLOW_MISSING_DEPENDENCIES) $(BUILD_BROKEN_MISSING_REQUIRED_MODULES)))
+ifneq (,$(_nonexistent_required))
+  $(warning Missing required dependencies:)
+  $(foreach r_i,$(_nonexistent_required), \
+    $(eval r := $(subst $(comma),$(space),$(r_i))) \
+    $(info $(word 1,$(r)) module $(word 2,$(r)) requires non-existent $(word 3,$(r)) module: $(word 4,$(r))) \
+  )
+  $(warning Set BUILD_BROKEN_MISSING_REQUIRED_MODULES := true to bypass this check if this is intentional)
+  $(error Build failed)
+endif # _nonexistent_required != empty
+endif # ALLOW_MISSING_DEPENDENCIES != true && BUILD_BROKEN_MISSING_REQUIRED_MODULES != true
+endif # HOST_OS != darwin
 
 define add-required-deps
 $(1): | $(2)
@@ -1719,13 +1730,11 @@
     $(call dist-for-goals, droidcore, $(f)))
 
   ifneq ($(ANDROID_BUILD_EMBEDDED),true)
-  ifneq ($(TARGET_BUILD_PDK),true)
     $(call dist-for-goals, droidcore, \
       $(APPS_ZIP) \
       $(INTERNAL_EMULATOR_PACKAGE_TARGET) \
     )
   endif
-  endif
 
   $(call dist-for-goals, droidcore, \
     $(INSTALLED_FILES_FILE_ROOT) \
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index 336048f..4d1009f 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -27,12 +27,8 @@
 	dicttool_aosp \
 	dump-products \
 	eng \
-	fusion \
 	oem_image \
 	online-system-api-sdk-docs \
-	pdk \
-	platform \
-	platform-java \
 	product-graph \
 	samplecode \
 	sdk \
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 775ee48..a97e401 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -401,7 +401,7 @@
 # resources.
 ifeq ($(LOCAL_SDK_RES_VERSION),core_current)
 # core_current doesn't contain any framework resources.
-else ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS_USE_PREBUILT_SDK),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+else ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_USE_PREBUILT_SDKS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 # for released sdk versions, the platform resources were built into android.jar.
 framework_res_package_export := \
     $(call resolve-prebuilt-sdk-jar-path,$(LOCAL_SDK_RES_VERSION))
@@ -529,7 +529,7 @@
 # We skip it for unbundled app builds where we cannot build veridex.
 module_run_appcompat :=
 ifeq (true,$(non_system_module))
-ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK)))  # ! unbundled app build
+ifeq (,$(TARGET_BUILD_APPS))  # ! unbundled app build
 ifneq ($(UNSAFE_DISABLE_HIDDENAPI_FLAGS),true)
   module_run_appcompat := true
 endif
diff --git a/core/pdk_config.mk b/core/pdk_config.mk
deleted file mode 100644
index 922e0ef..0000000
--- a/core/pdk_config.mk
+++ /dev/null
@@ -1,190 +0,0 @@
-# This file defines the rule to fuse the platform.zip into the current PDK build.
-PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR :=
-PDK_PLATFORM_JAVA_ZIP_JAVA_HOST_LIB_DIR := \
-	host/common/obj/JAVA_LIBRARIES/bouncycastle-host_intermediates \
-	host/common/obj/JAVA_LIBRARIES/compatibility-host-util_intermediates \
-	host/common/obj/JAVA_LIBRARIES/cts-tradefed-harness_intermediates \
-	host/common/obj/JAVA_LIBRARIES/hosttestlib_intermediates
-PDK_PLATFORM_JAVA_ZIP_CONTENTS :=
-
-ifneq (,$(filter platform-java, $(MAKECMDGOALS))$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR))
-# additional items to add to platform.zip for platform-java build
-# For these dirs, add classes.jar and javalib.jar from the dir to platform.zip
-# all paths under out dir
-PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR += \
-  target/common/obj/JAVA_LIBRARIES/android.test.runner_intermediates \
-  target/common/obj/JAVA_LIBRARIES/android-common_intermediates \
-  target/common/obj/JAVA_LIBRARIES/android-ex-camera2_intermediates \
-  target/common/obj/JAVA_LIBRARIES/android_stubs_current_intermediates \
-  target/common/obj/JAVA_LIBRARIES/bouncycastle_intermediates \
-  target/common/obj/JAVA_LIBRARIES/conscrypt_intermediates \
-  target/common/obj/JAVA_LIBRARIES/core-oj_intermediates \
-  target/common/obj/JAVA_LIBRARIES/core-libart_intermediates \
-  target/common/obj/JAVA_LIBRARIES/core-icu4j_intermediates \
-  target/common/obj/JAVA_LIBRARIES/ext_intermediates \
-  target/common/obj/JAVA_LIBRARIES/framework-minus-apex_intermediates \
-  target/common/obj/JAVA_LIBRARIES/hwbinder_intermediates \
-  target/common/obj/JAVA_LIBRARIES/ims-common_intermediates \
-  target/common/obj/JAVA_LIBRARIES/okhttp_intermediates \
-  target/common/obj/JAVA_LIBRARIES/telephony-common_intermediates \
-  target/common/obj/JAVA_LIBRARIES/voip-common_intermediates \
-
-# not java libraries
-PDK_PLATFORM_JAVA_ZIP_CONTENTS += \
-	target/common/obj/APPS/framework-res_intermediates/package-export.apk \
-	target/common/obj/APPS/framework-res_intermediates/src/R.stamp
-endif # platform-java or FUSION build
-
-PDK_PLATFORM_JAVA_ZIP_JAVA_LIB_DIR := \
-	$(PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR) \
-	$(PDK_PLATFORM_JAVA_ZIP_JAVA_HOST_LIB_DIR)
-
-PDK_PLATFORM_JAVA_ZIP_CONTENTS += $(foreach lib_dir,$(PDK_PLATFORM_JAVA_ZIP_JAVA_LIB_DIR),\
-    $(lib_dir)/classes.jar $(lib_dir)/classes-header.jar \
-    $(lib_dir)/javalib.jar  $(lib_dir)/classes*.dex \
-    $(lib_dir)/classes.dex.toc )
-
-# check and override java support level
-ifneq ($(TARGET_BUILD_PDK)$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR),)
-  ifneq ($(wildcard external/proguard),)
-    TARGET_BUILD_JAVA_SUPPORT_LEVEL := sdk
-  else # no proguard
-    TARGET_BUILD_JAVA_SUPPORT_LEVEL :=
-  endif
-  # platform support is set after checking platform.zip
-endif # PDK
-
-ifneq (,$(PDK_FUSION_PLATFORM_DIR)$(PDK_FUSION_PLATFORM_ZIP))
-
-_pdk_fusion_intermediates :=
-_pdk_fusion_stamp :=
-_pdk_fusion_file_list :=
-_pdk_fusion_java_file_list :=
-PDK_FUSION_SYMLINK_STAMP :=
-
-ifdef PDK_FUSION_PLATFORM_DIR
-  _pdk_fusion_intermediates := $(PDK_FUSION_PLATFORM_DIR)
-  _pdk_fusion_file_list := $(sort \
-    $(shell cd $(PDK_FUSION_PLATFORM_DIR); find * -type f))
-  _pdk_fusion_java_file_list := $(filter target/common/%,$(_pdk_fusion_file_list))
-  _pdk_fusion_file_list := $(filter-out target/common/%,$(_pdk_fusion_file_list))
-
-  PDK_FUSION_SYMLINK_STAMP := $(call intermediates-dir-for, PACKAGING, pdk_fusion)/pdk_symlinks.stamp
-
-  symlink_list := $(sort \
-    $(shell cd $(PDK_FUSION_PLATFORM_DIR); find * -type l))
-$(PDK_FUSION_SYMLINK_STAMP): PRIVATE_SYMLINKS := $(foreach s,$(symlink_list),\
-    $(s):$(shell readlink $(PDK_FUSION_PLATFORM_DIR)/$(s)))
-$(PDK_FUSION_SYMLINK_STAMP):
-	$(foreach s,$(PRIVATE_SYMLINKS),\
-	  mkdir -p $(PRODUCT_OUT)/$(dir $(call word-colon,1,$(s))) && \
-	  ln -sf $(call word-colon,2,$(s)) $(PRODUCT_OUT)/$(call word-colon,1,$(s)) &&) true
-	touch $@
-
-  symlink_list :=
-endif # PDK_FUSION_PLATFORM_DIR
-
-ifdef PDK_FUSION_PLATFORM_ZIP
-  _pdk_fusion_intermediates := $(call intermediates-dir-for, PACKAGING, pdk_fusion)
-  _pdk_fusion_stamp := $(_pdk_fusion_intermediates)/pdk_fusion.stamp
-
-  _pdk_fusion_file_list := $(shell unzip -Z -1 $(PDK_FUSION_PLATFORM_ZIP) \
-      '*[^/]' -x 'target/common/*' 2>/dev/null)
-  _pdk_fusion_java_file_list := \
-      $(shell unzip -Z -1 $(PDK_FUSION_PLATFORM_ZIP) 'target/common/*' 2>/dev/null)
-  _pdk_fusion_files := $(addprefix $(_pdk_fusion_intermediates)/,\
-      $(_pdk_fusion_file_list) $(_pdk_fusion_java_file_list))
-
-$(_pdk_fusion_stamp) : $(PDK_FUSION_PLATFORM_ZIP)
-	@echo "Unzip $(dir $@) <- $<"
-	$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@)
-	$(hide) unzip -qo $< -d $(dir $@)
-	$(call split-long-arguments,-touch,$(_pdk_fusion_files))
-	$(hide) touch $@
-
-$(_pdk_fusion_files) : $(_pdk_fusion_stamp)
-endif # PDK_FUSION_PLATFORM_ZIP
-
-ifneq ($(_pdk_fusion_java_file_list),)
-  # This represents whether java build can use platform API or not
-  # This should not be used in Android.mk
-  TARGET_BUILD_PDK_JAVA_PLATFORM := true
-  ifneq ($(TARGET_BUILD_JAVA_SUPPORT_LEVEL),)
-    TARGET_BUILD_JAVA_SUPPORT_LEVEL := platform
-  endif
-endif
-
-# Implicit pattern rules to copy the fusion files to the system image directory.
-# Note that if there is already explicit rule in the build system to generate a file,
-# the pattern rule will be just ignored by make.
-# That's desired by us: we want only absent files from the platform zip package.
-# Copy with the last-modified time preserved, never follow symbolic links.
-$(PRODUCT_OUT)/% : $(_pdk_fusion_intermediates)/% $(_pdk_fusion_stamp)
-	@mkdir -p $(dir $@)
-	$(hide) rm -rf $@
-	$(hide) cp -fpPR $< $@
-
-# implicit rules for host java files
-$(HOST_COMMON_OUT_ROOT)/% : $(_pdk_fusion_intermediates)/host/common/% $(_pdk_fusion_stamp)
-	@mkdir -p $(dir $@)
-	$(hide) cp -fpPR $< $@
-
-ifeq (true,$(TARGET_BUILD_PDK_JAVA_PLATFORM))
-  PDK_FUSION_OUT_DIR := $(OUT_DIR)
-
-  define JAVA_dependency_template
-  $(call add-dependency,$(PDK_FUSION_OUT_DIR)/$(strip $(1)),\
-    $(foreach d,$(filter $(2),$(_pdk_fusion_java_file_list)),$(PDK_FUSION_OUT_DIR)/$(d)))
-  endef
-
-  # needs explicit dependency as package-export.apk is not explicitly pulled
-  $(eval $(call JAVA_dependency_template,\
-  target/common/obj/APPS/framework-res_intermediates/src/R.stamp,\
-  target/common/obj/APPS/framework-res_intermediates/package-export.apk))
-
-  # javalib.jar should pull classes.jar as classes.jar is not explicitly pulled.
-  $(foreach lib_dir,$(PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR),\
-  $(eval $(call JAVA_dependency_template,$(lib_dir)/javalib.jar,\
-  $(lib_dir)/classes.jar)))
-
-# implicit rules for all other target files
-$(TARGET_COMMON_OUT_ROOT)/% : $(_pdk_fusion_intermediates)/target/common/% $(_pdk_fusion_stamp)
-	@mkdir -p $(dir $@)
-	$(hide) cp -fpPR $< $@
-endif # TARGET_BUILD_PDK_JAVA_PLATFORM
-
-ALL_PDK_FUSION_FILES := $(addprefix $(PRODUCT_OUT)/, $(_pdk_fusion_file_list))
-
-endif # PDK_FUSION_PLATFORM_ZIP || PDK_FUSION_PLATFORM_DIR
-
-ifeq ($(TARGET_BUILD_PDK),true)
-  $(info PDK TARGET_BUILD_JAVA_SUPPORT_LEVEL $(TARGET_BUILD_JAVA_SUPPORT_LEVEL))
-  ifeq ($(TARGET_BUILD_PDK_JAVA_PLATFORM),)
-    # SDK used for Java build under PDK
-    PDK_BUILD_SDK_VERSION := $(lastword $(TARGET_AVAILABLE_SDK_VERSIONS))
-    $(info PDK Build uses SDK $(PDK_BUILD_SDK_VERSION))
-  else # PDK_JAVA
-    $(info PDK Build uses the current platform API)
-  endif # PDK_JAVA
-endif # BUILD_PDK
-
-ifneq (,$(filter platform platform-java, $(MAKECMDGOALS))$(filter true,$(TARGET_BUILD_PDK)))
-  # files under $(PRODUCT_OUT)/symbols to help debugging.
-  # Source not included to PDK due to dependency issue, so provide symbols instead.
-
-  PDK_SYMBOL_FILES_LIST :=
-  ifeq ($(TARGET_IS_64_BIT),true)
-    PDK_SYMBOL_FILES_LIST += system/bin/app_process64
-    ifdef TARGET_2ND_ARCH
-      PDK_SYMBOL_FILES_LIST += system/bin/app_process32
-    endif
-  else
-    PDK_SYMBOL_FILES_LIST += system/bin/app_process32
-  endif
-
-  ifneq (,$(PDK_FUSION_PLATFORM_ZIP)$(PDK_FUSION_PLATFORM_DIR))
-    # symbols should be explicitly pulled for fusion build
-    $(foreach f,$(filter $(PDK_SYMBOL_FILES_LIST), $(_pdk_fusion_file_list)),\
-      $(eval $(call add-dependency,$(PRODUCT_OUT)/$(f),$(PRODUCT_OUT)/symbols/$(f))))
-  endif # PLATFORM_ZIP || PLATFORM_DIR
-endif # platform.zip/dir build or PDK
diff --git a/core/pdk_fusion_modules.mk b/core/pdk_fusion_modules.mk
deleted file mode 100644
index 235acf9..0000000
--- a/core/pdk_fusion_modules.mk
+++ /dev/null
@@ -1,86 +0,0 @@
-# Auto-generate module definitions from platform.zip.
-# We use these rules to rebuild .odex files of the .jar/.apk inside the platform.zip.
-#
-
-ifdef PDK_FUSION_PLATFORM_ZIP
-pdk_dexpreopt_config_mk := $(TARGET_OUT_INTERMEDIATES)/pdk_dexpreopt_config.mk
-
-$(shell rm -f $(pdk_dexpreopt_config_mk) && mkdir -p $(dir $(pdk_dexpreopt_config_mk)) && \
-        unzip -qo $(PDK_FUSION_PLATFORM_ZIP) -d $(dir $(pdk_dexpreopt_config_mk)) pdk_dexpreopt_config.mk 2>/dev/null)
-endif
-
-ifdef PDK_FUSION_PLATFORM_DIR
-pdk_dexpreopt_config_mk := $(PDK_FUSION_PLATFORM_DIR)/pdk_dexpreopt_config.mk
-endif
-
--include $(pdk_dexpreopt_config_mk)
-
-# Define a PDK prebuilt module that comes from platform.zip.
-# Must be called with $(eval)
-define prebuilt-pdk-java-module
-include $(CLEAR_VARS)
-LOCAL_MODULE:=$(1)
-LOCAL_MODULE_CLASS:=$(2)
-# Use LOCAL_PREBUILT_MODULE_FILE instead of LOCAL_SRC_FILES so we don't need to deal with LOCAL_PATH.
-LOCAL_PREBUILT_MODULE_FILE:=$(3)
-LOCAL_DEX_PREOPT:=$(4)
-LOCAL_MULTILIB:=$(5)
-LOCAL_DEX_PREOPT_FLAGS:=$(6)
-LOCAL_BUILT_MODULE_STEM:=$(7)
-LOCAL_MODULE_SUFFIX:=$(suffix $(7))
-LOCAL_PRIVILEGED_MODULE:=$(8)
-LOCAL_VENDOR_MODULE:=$(9)
-LOCAL_MODULE_TARGET_ARCH:=$(10)
-LOCAL_REPLACE_PREBUILT_APK_INSTALLED:=$(11)
-LOCAL_CERTIFICATE:=PRESIGNED
-include $(BUILD_PREBUILT)
-
-# The source prebuilts are extracted in the rule of _pdk_fusion_stamp.
-# Use a touch rule to establish the dependency.
-ifndef PDK_FUSION_PLATFORM_DIR
-$(3) $(11) : $(_pdk_fusion_stamp)
-	$(hide) if [ ! -f $$@ ]; then \
-	  echo 'Error: $$@ does not exist. Check your platform.zip.' 1>&2; \
-	  exit 1; \
-	fi
-	$(hide) touch $$@
-endif
-endef
-
-# We don't have a LOCAL_PATH for the auto-generated modules, so let it be the $(BUILD_SYSTEM).
-LOCAL_PATH := $(BUILD_SYSTEM)
-
-##### Java libraries.
-# Only set up rules for modules that aren't built from source.
-pdk_prebuilt_libraries := $(foreach l,$(PDK.DEXPREOPT.JAVA_LIBRARIES),\
-  $(if $(MODULE.TARGET.JAVA_LIBRARIES.$(l)),,$(l)))
-
-$(foreach l,$(pdk_prebuilt_libraries), $(eval \
-  $(call prebuilt-pdk-java-module,\
-    $(l),\
-    JAVA_LIBRARIES,\
-    $(_pdk_fusion_intermediates)/$(PDK.DEXPREOPT.$(l).SRC),\
-    $(PDK.DEXPREOPT.$(l).DEX_PREOPT),\
-    $(PDK.DEXPREOPT.$(l).MULTILIB),\
-    $(PDK.DEXPREOPT.$(l).DEX_PREOPT_FLAGS),\
-    javalib.jar,\
-    )))
-
-###### Apps.
-pdk_prebuilt_apps := $(foreach a,$(PDK.DEXPREOPT.APPS),\
-  $(if $(MODULE.TARGET.APPS.$(a)),,$(a)))
-
-$(foreach a,$(pdk_prebuilt_apps), $(eval \
-  $(call prebuilt-pdk-java-module,\
-    $(a),\
-    APPS,\
-    $(_pdk_fusion_intermediates)/$(PDK.DEXPREOPT.$(a).SRC),\
-    $(PDK.DEXPREOPT.$(a).DEX_PREOPT),\
-    $(PDK.DEXPREOPT.$(a).MULTILIB),\
-    $(PDK.DEXPREOPT.$(a).DEX_PREOPT_FLAGS),\
-    package.apk,\
-    $(PDK.DEXPREOPT.$(a).PRIVILEGED_MODULE),\
-    $(PDK.DEXPREOPT.$(a).VENDOR_MODULE),\
-    $(PDK.DEXPREOPT.$(a).TARGET_ARCH),\
-    $(_pdk_fusion_intermediates)/$(PDK.DEXPREOPT.$(a).STRIPPED_SRC),\
-    )))
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 46b16ac..b994b17 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -52,7 +52,7 @@
 # We skip it for unbundled app builds where we cannot build veridex.
 module_run_appcompat :=
 ifeq (true,$(non_system_module))
-ifeq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK)))  # ! unbundled app build
+ifeq (,$(TARGET_BUILD_APPS))  # ! unbundled app build
 ifneq ($(UNSAFE_DISABLE_HIDDENAPI_FLAGS),true)
   module_run_appcompat := true
 endif
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 98ab07d..4731250 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -41,8 +41,7 @@
 $(call add_json_bool, Allow_missing_dependencies,        $(ALLOW_MISSING_DEPENDENCIES))
 $(call add_json_bool, Unbundled_build,                   $(TARGET_BUILD_UNBUNDLED))
 $(call add_json_bool, Unbundled_build_apps,              $(TARGET_BUILD_APPS))
-$(call add_json_bool, Unbundled_build_sdks_from_source,  $(UNBUNDLED_BUILD_SDKS_FROM_SOURCE))
-$(call add_json_bool, Pdk,                               $(filter true,$(TARGET_BUILD_PDK)))
+$(call add_json_bool, Always_use_prebuilt_sdks,          $(TARGET_BUILD_USE_PREBUILT_SDKS))
 
 $(call add_json_bool, Debuggable,                        $(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
 $(call add_json_bool, Eng,                               $(filter eng,$(TARGET_BUILD_VARIANT)))
@@ -217,6 +216,7 @@
 $(call add_json_bool, BoardUsesRecoveryAsBoot, $(BOARD_USES_RECOVERY_AS_BOOT))
 
 $(call add_json_list, BoardKernelBinaries, $(BOARD_KERNEL_BINARIES))
+$(call add_json_list, BoardKernelModuleInterfaceVersions, $(BOARD_KERNEL_MODULE_INTERFACE_VERSIONS))
 
 $(call json_end)
 
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 81dc2df..7a87322 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -111,7 +111,7 @@
 framework_res_package_export :=
 # Please refer to package.mk
 ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS_USE_PREBUILT_SDK),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
+ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_USE_PREBUILT_SDKS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 framework_res_package_export := \
     $(call resolve-prebuilt-sdk-jar-path,$(LOCAL_SDK_RES_VERSION))
 else
diff --git a/core/tasks/boot_jars_package_check.mk b/core/tasks/boot_jars_package_check.mk
index a17aaff..c9a8e27 100644
--- a/core/tasks/boot_jars_package_check.mk
+++ b/core/tasks/boot_jars_package_check.mk
@@ -17,7 +17,6 @@
 #
 
 ifneq ($(SKIP_BOOT_JARS_CHECK),true)
-ifneq ($(TARGET_BUILD_PDK),true)
 ifdef PRODUCT_BOOT_JARS
 
 intermediates := $(call intermediates-dir-for, PACKAGING, boot-jars-package-check,,COMMON)
@@ -61,5 +60,4 @@
 droidcore : check-boot-jars
 
 endif  # PRODUCT_BOOT_JARS
-endif  # TARGET_BUILD_PDK not true
 endif  # SKIP_BOOT_JARS_CHECK not true
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index d0aeb1c..9d55f42 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -33,20 +33,14 @@
 #   updating the last seen rollback index in the tamper-evident storage.
 BOARD_AVB_ROLLBACK_INDEX := 0
 
-ifndef BUILDING_GSI
 # Enable AVB chained partition for system.
 # https://android.googlesource.com/platform/external/avb/+/master/README.md
 BOARD_AVB_SYSTEM_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
 BOARD_AVB_SYSTEM_ALGORITHM := SHA256_RSA2048
 BOARD_AVB_SYSTEM_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
 BOARD_AVB_SYSTEM_ROLLBACK_INDEX_LOCATION := 1
-else
-# Enable vbmeta_system on GSI targets
-BOARD_AVB_VBMETA_SYSTEM := system
-BOARD_AVB_VBMETA_SYSTEM_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
-BOARD_AVB_VBMETA_SYSTEM_ALGORITHM := SHA256_RSA2048
-BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
-BOARD_AVB_VBMETA_SYSTEM_ROLLBACK_INDEX_LOCATION := 1
+ifdef BUILDING_GSI
+# super.img spec for GSI targets
 BOARD_SUPER_PARTITION_SIZE := 3229614080
 BOARD_SUPER_PARTITION_GROUPS := gsi_dynamic_partitions
 BOARD_GSI_DYNAMIC_PARTITIONS_PARTITION_LIST := system
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index c491d4a..b4df5fe 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -31,10 +31,6 @@
 ifeq ($(TARGET_IS_64_BIT)|$(TARGET_2ND_ARCH),true|)
 # TODO(b/110429754) remove this condition when we support 64-bit-only device
 check-vndk-list: ;
-else ifeq ($(TARGET_BUILD_PDK),true)
-# b/118634643: don't check VNDK lib list when building PDK. Some libs (libandroid_net.so
-# and some render-script related ones) can't be built in PDK due to missing frameworks/base.
-check-vndk-list: ;
 else ifeq ($(TARGET_SKIP_CURRENT_VNDK),true)
 check-vndk-list: ;
 else ifeq ($(BOARD_VNDK_VERSION),)
@@ -211,3 +207,13 @@
 LOCAL_MODULE_RELATIVE_PATH := init
 
 include $(BUILD_PREBUILT)
+
+
+include $(CLEAR_VARS)
+LOCAL_MODULE := init.vndk-nodef.rc
+LOCAL_SRC_FILES := $(LOCAL_MODULE)
+LOCAL_MODULE_CLASS := ETC
+LOCAL_SYSTEM_EXT_MODULE := true
+LOCAL_MODULE_RELATIVE_PATH := init
+
+include $(BUILD_PREBUILT)
diff --git a/target/product/gsi/init.gsi.rc b/target/product/gsi/init.gsi.rc
index c6faba7..f482843 100644
--- a/target/product/gsi/init.gsi.rc
+++ b/target/product/gsi/init.gsi.rc
@@ -1,3 +1,5 @@
 #
 # Android init script for GSI required initialization
 #
+
+import /system/system_ext/etc/init/init.vndk-${ro.vndk.version:-nodef}.rc
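
The import above uses init's ${prop:-default} substitution: with ro.vndk.version set, the matching init.vndk-<version>.rc is imported; with it unset, the new init.vndk-nodef.rc below is imported instead and forces a reboot to bootloader. A minimal Python sketch of that substitution rule, assuming the simplified semantics described here rather than init's full parser:

import re

def expand_props(template, props):
    # Substitute each ${name:-default} with the property value, else the default.
    pattern = re.compile(r'\$\{([^:}]+):-([^}]*)\}')
    return pattern.sub(lambda m: props.get(m.group(1), m.group(2)), template)

line = '/system/system_ext/etc/init/init.vndk-${ro.vndk.version:-nodef}.rc'
print(expand_props(line, {}))                         # ...init.vndk-nodef.rc
print(expand_props(line, {'ro.vndk.version': '29'}))  # ...init.vndk-29.rc
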
diff --git a/target/product/gsi/init.legacy-gsi.rc b/target/product/gsi/init.legacy-gsi.rc
deleted file mode 100644
index 00dd576..0000000
--- a/target/product/gsi/init.legacy-gsi.rc
+++ /dev/null
@@ -1,3 +0,0 @@
-# If ro.vndk.version is not defined, import init.vndk-27.rc.
-import /system/etc/init/gsi/init.vndk-${ro.vndk.version:-27}.rc
-
diff --git a/target/product/gsi/init.vndk-27.rc b/target/product/gsi/init.vndk-27.rc
deleted file mode 100644
index d464a2f..0000000
--- a/target/product/gsi/init.vndk-27.rc
+++ /dev/null
@@ -1,3 +0,0 @@
-on early-init
-    # Set ro.vndk.version to 27 so that O-MR1-VENDOR can run latest GSI.
-    setprop ro.vndk.version 27
diff --git a/target/product/gsi/init.vndk-nodef.rc b/target/product/gsi/init.vndk-nodef.rc
new file mode 100644
index 0000000..efeef11
--- /dev/null
+++ b/target/product/gsi/init.vndk-nodef.rc
@@ -0,0 +1,3 @@
+on early-init
+    # Must define BOARD_VNDK_VERSION
+    exec - root -- /system/bin/reboot bootloader
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 5421ee0..46c956d 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -49,7 +49,8 @@
 # GSI specific tasks on boot
 PRODUCT_PACKAGES += \
     gsi_skip_mount.cfg \
-    init.gsi.rc
+    init.gsi.rc \
+    init.vndk-nodef.rc \
 
 # Support additional P and Q VNDK packages
 PRODUCT_EXTRA_VNDK_VERSIONS := 28 29
diff --git a/target/product/legacy_gsi_release.mk b/target/product/legacy_gsi_release.mk
index c1646bb..09b96fb 100644
--- a/target/product/legacy_gsi_release.mk
+++ b/target/product/legacy_gsi_release.mk
@@ -16,22 +16,8 @@
 
 include $(SRC_TARGET_DIR)/product/gsi_release.mk
 
-PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
-    system/etc/init/init.legacy-gsi.rc \
-    system/etc/init/gsi/init.vndk-27.rc \
-    system/etc/ld.config.vndk_lite.txt \
-
 # Legacy GSI support additional O-MR1 interface
 PRODUCT_EXTRA_VNDK_VERSIONS += 27
 
-# Support for the O-MR1 devices
-PRODUCT_COPY_FILES += \
-    build/make/target/product/gsi/init.legacy-gsi.rc:system/etc/init/init.legacy-gsi.rc \
-    build/make/target/product/gsi/init.vndk-27.rc:system/etc/init/gsi/init.vndk-27.rc
-
-# Namespace configuration file for non-enforcing VNDK
-PRODUCT_PACKAGES += \
-    ld.config.vndk_lite.txt
-
 # Legacy GSI relax the compatible property checking
 PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := false
diff --git a/tools/check_identical_lib.sh b/tools/check_identical_lib.sh
index c3aa41a..c9f436f 100755
--- a/tools/check_identical_lib.sh
+++ b/tools/check_identical_lib.sh
@@ -26,7 +26,19 @@
 strip_lib ${CORE} ${stripped_core}
 strip_lib ${VENDOR} ${stripped_vendor}
 if ! cmp -s ${stripped_core} ${stripped_vendor}; then
-  echo "VNDK library not in vndkMustUseVendorVariantList but has different core and vendor variant: $(basename ${CORE})"
-  echo "If the two variants need to have different runtime behavior, consider using libvndksupport."
+  echo "ERROR: VNDK library $(basename ${CORE%.so}) has different core and" \
+    "vendor variants! This means that the copy used in the system.img/etc" \
+    "and vendor.img/etc images are different. In order to preserve space on" \
+    "some devices, it is helpful if they are the same. Frequently, " \
+    "libraries are different because they or their dependencies compile" \
+    "things based on the macro '__ANDROID_VNDK__' or they specify custom" \
+    "options under 'target: { vendor: { ... } }'. Here are some possible" \
+    "resolutions:"
+  echo "ERROR: 1). Remove differences, possibly using the libvndksupport" \
+    "function android_is_in_vendor_process in order to turn this into a" \
+    "runtime difference."
+  echo "ERROR: 2). Add the library to the VndkMustUseVendorVariantList" \
+    "variable in build/soong/cc/config/vndk.go, which is used to" \
+    "acknowledge this difference."
   exit 1
 fi
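
For context, the check around this message strips both library variants and byte-compares the results. A rough Python equivalent of the same flow, assuming the strip_lib helper (defined earlier in the script and not shown in this hunk) amounts to an llvm-strip invocation:

import os
import subprocess
import sys
import tempfile

def stripped_copy(lib):
    # Strip symbols and debug info so only meaningful differences remain.
    fd, out = tempfile.mkstemp(suffix='.so')
    os.close(fd)
    subprocess.check_call(['llvm-strip', '-o', out, lib])  # assumed tool
    return out

def variants_identical(core, vendor):
    a, b = stripped_copy(core), stripped_copy(vendor)
    try:
        with open(a, 'rb') as fa, open(b, 'rb') as fb:
            return fa.read() == fb.read()
    finally:
        os.unlink(a)
        os.unlink(b)

if __name__ == '__main__':
    sys.exit(0 if variants_identical(sys.argv[1], sys.argv[2]) else 1)
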
diff --git a/tools/extract_kernel.py b/tools/extract_kernel.py
index 92a647b..0046b38 100755
--- a/tools/extract_kernel.py
+++ b/tools/extract_kernel.py
@@ -40,7 +40,7 @@
 # LINUX_COMPILE_HOST ") (" LINUX_COMPILER ") " UTS_VERSION "\n";
 LINUX_BANNER_PREFIX = b'Linux version '
 LINUX_BANNER_REGEX = LINUX_BANNER_PREFIX + \
-    r'(?P<release>(?P<version>[0-9]+[.][0-9]+[.][0-9]+).*) \(.*@.*\) \(.*\) .*\n'
+    r'(?P<release>(?P<version>[0-9]+[.][0-9]+[.][0-9]+).*) \(.*@.*\) \((?P<compiler>.*)\) .*\n'
 
 
 def get_from_release(input_bytes, start_idx, key):
@@ -82,6 +82,14 @@
   return dump_from_release(input_bytes, "version")
 
 
+def dump_compiler(input_bytes):
+  """
+  Dump the compiler string from input_bytes. Search for the string
+  "Linux version " and do pattern matching after it. See LINUX_BANNER_REGEX.
+  """
+  return dump_from_release(input_bytes, "compiler")
+
+
 def dump_release(input_bytes):
   """
   Dump kernel release, w.x.y-..., from input_bytes. Search for the string
@@ -208,6 +216,13 @@
                       nargs='?',
                       type=argparse.FileType('wb'),
                       const=sys.stdout)
+  parser.add_argument('--output-compiler',
+                      help='If specified, write the compiler information. Use stdout if no file '
+                           'is specified.',
+                      metavar='FILE',
+                      nargs='?',
+                      type=argparse.FileType('wb'),
+                      const=sys.stdout)
   parser.add_argument('--tools',
                       help='Decompression tools to use. If not specified, PATH '
                            'is searched.',
@@ -234,6 +249,10 @@
                       "kernel release in {}".format(args.input.name)):
     ret = 1
 
+  if not dump_to_file(args.output_compiler, dump_compiler, input_bytes,
+                      "kernel compiler in {}".format(args.input.name)):
+    ret = 1
+
   return ret
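
To see what the new named group captures, the widened pattern can be exercised directly. The banner below is illustrative, not taken from a real kernel build:

import re

LINUX_BANNER_REGEX = (
    br'Linux version '
    br'(?P<release>(?P<version>[0-9]+[.][0-9]+[.][0-9]+).*) '
    br'\(.*@.*\) \((?P<compiler>.*)\) .*\n'
)

banner = (b'Linux version 5.4.61-example (build@host) '
          b'(Android clang version 11.0.2) #1 SMP Tue Jan 1 00:00:00 UTC 2020\n')
match = re.search(LINUX_BANNER_REGEX, banner)
print(match.group('version'))   # b'5.4.61'
print(match.group('release'))   # b'5.4.61-example'
print(match.group('compiler'))  # b'Android clang version 11.0.2'
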
 
 
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 11f92ab..ace00ac 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -93,7 +93,9 @@
     srcs: [
         "edify_generator.py",
         "ota_from_target_files.py",
+        "non_ab_ota.py",
         "target_files_diff.py",
+        "ota_utils.py",
     ],
     libs: [
         "releasetools_check_target_files_vintf",
@@ -104,6 +106,12 @@
         "brillo_update_payload",
         "checkvintf",
     ],
+    target: {
+        darwin: {
+            // required module "brillo_update_payload" is disabled on darwin
+            enabled: false,
+        },
+    },
 }
 
 //
@@ -297,6 +305,12 @@
     required: [
         "delta_generator",
     ],
+    target: {
+        darwin: {
+            // required module "delta_generator" is disabled on darwin
+            enabled: false,
+        },
+    },
 }
 
 python_binary_host {
@@ -369,6 +383,12 @@
     required: [
         "checkvintf",
     ],
+    target: {
+        darwin: {
+            // libs dep "releasetools_ota_from_target_files" is disabled on darwin
+            enabled: false,
+        },
+    },
 }
 
 python_binary_host {
@@ -466,6 +486,12 @@
     data: [
         "testdata/**/*",
     ],
+    target: {
+        darwin: {
+            // libs dep "releasetools_ota_from_target_files" is disabled on darwin
+            enabled: false,
+        },
+    },
 }
 
 python_test_host {
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index ef66112..0edefac 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -220,6 +220,52 @@
 
   raise ValueError('{} is not a valid directory or zip file'.format(inp))
 
+def CheckVintfIfTrebleEnabled(target_files, target_info):
+  """Checks compatibility info of the input target files.
+
+  Metadata used for compatibility verification is retrieved from the target files.
+
+  Compatibility should only be checked for devices that have enabled
+  Treble support.
+
+  Args:
+    target_files: Path to zip file containing the source files to be included
+        for OTA. Can also be the path to an extracted directory.
+    target_info: The BuildInfo instance that holds the target build info.
+  """
+
+  # Will only proceed if the target has enabled Treble support (as well as
+  # having a /vendor partition).
+  if not HasTrebleEnabled(target_files, target_info):
+    return
+
+  # Skip adding the compatibility package as a workaround for b/114240221. The
+  # compatibility will always fail on devices without qualified kernels.
+  if OPTIONS.skip_compatibility_check:
+    return
+
+  if not CheckVintf(target_files, target_info):
+    raise RuntimeError("VINTF compatibility check failed")
+
+def HasTrebleEnabled(target_files, target_info):
+  def HasVendorPartition(target_files):
+    if os.path.isdir(target_files):
+      return os.path.isdir(os.path.join(target_files, "VENDOR"))
+    if zipfile.is_zipfile(target_files):
+      return HasPartition(zipfile.ZipFile(target_files), "vendor")
+    raise ValueError("Unknown target_files argument")
+
+  return (HasVendorPartition(target_files) and
+          target_info.GetBuildProp("ro.treble.enabled") == "true")
+
+
+def HasPartition(target_files_zip, partition):
+  try:
+    target_files_zip.getinfo(partition.upper() + "/")
+    return True
+  except KeyError:
+    return False
+
 
 def main(argv):
   args = common.ParseOptions(argv, __doc__)
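
HasPartition and CheckVintfIfTrebleEnabled become the shared entry points for the OTA tooling (non_ab_ota.py imports them below). A small usage sketch of the partition check against a target-files zip, with the archive name as a placeholder:

import zipfile

with zipfile.ZipFile('target-files.zip') as zf:  # placeholder path
    # Partition contents live under uppercase directories such as VENDOR/.
    try:
        zf.getinfo('VENDOR/')
        print('vendor partition present; the Treble VINTF check may run')
    except KeyError:
        print('no vendor partition; CheckVintfIfTrebleEnabled returns early')
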
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 1846a67..89900d3 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -1227,7 +1227,7 @@
     cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
   p1 = Run(cmd, stdout=subprocess.PIPE)
   if lz4_ramdisks:
-    p2 = Run(["lz4", "-l", "-12" , "--favor-decSpeed"], stdin=p1.stdout,
+    p2 = Run(["lz4", "-l", "-12", "--favor-decSpeed"], stdin=p1.stdout,
              stdout=ramdisk_img.file.fileno())
   else:
     p2 = Run(["minigzip"], stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
new file mode 100644
index 0000000..3a87957
--- /dev/null
+++ b/tools/releasetools/non_ab_ota.py
@@ -0,0 +1,684 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import logging
+import os
+import zipfile
+
+import common
+import edify_generator
+import verity_utils
+from check_target_files_vintf import CheckVintfIfTrebleEnabled, HasPartition
+from common import OPTIONS
+from ota_utils import UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata, PropertyFiles
+
+logger = logging.getLogger(__name__)
+
+
+def GetBlockDifferences(target_zip, source_zip, target_info, source_info,
+                        device_specific):
+  """Returns a ordered dict of block differences with partition name as key."""
+
+  def GetIncrementalBlockDifferenceForPartition(name):
+    if not HasPartition(source_zip, name):
+      raise RuntimeError(
+          "can't generate incremental that adds {}".format(name))
+
+    partition_src = common.GetUserImage(name, OPTIONS.source_tmp, source_zip,
+                                        info_dict=source_info,
+                                        allow_shared_blocks=allow_shared_blocks)
+
+    hashtree_info_generator = verity_utils.CreateHashtreeInfoGenerator(
+        name, 4096, target_info)
+    partition_tgt = common.GetUserImage(name, OPTIONS.target_tmp, target_zip,
+                                        info_dict=target_info,
+                                        allow_shared_blocks=allow_shared_blocks,
+                                        hashtree_info_generator=hashtree_info_generator)
+
+    # Check the first block of the source system partition for remount R/W only
+    # if the filesystem is ext4.
+    partition_source_info = source_info["fstab"]["/" + name]
+    check_first_block = partition_source_info.fs_type == "ext4"
+    # Disable using imgdiff for squashfs. 'imgdiff -z' expects input files to be
+    # in zip format. However, with squashfs, a) all files are compressed in LZ4;
+    # b) the blocks listed in block map may not contain all the bytes for a
+    # given file (because they're rounded to be 4K-aligned).
+    partition_target_info = target_info["fstab"]["/" + name]
+    disable_imgdiff = (partition_source_info.fs_type == "squashfs" or
+                       partition_target_info.fs_type == "squashfs")
+    return common.BlockDifference(name, partition_tgt, partition_src,
+                                  check_first_block,
+                                  version=blockimgdiff_version,
+                                  disable_imgdiff=disable_imgdiff)
+
+  if source_zip:
+    # See notes in common.GetUserImage()
+    allow_shared_blocks = (source_info.get('ext4_share_dup_blocks') == "true" or
+                           target_info.get('ext4_share_dup_blocks') == "true")
+    blockimgdiff_version = max(
+        int(i) for i in target_info.get(
+            "blockimgdiff_versions", "1").split(","))
+    assert blockimgdiff_version >= 3
+
+  block_diff_dict = collections.OrderedDict()
+  partition_names = ["system", "vendor", "product", "odm", "system_ext",
+                     "vendor_dlkm", "odm_dlkm"]
+  for partition in partition_names:
+    if not HasPartition(target_zip, partition):
+      continue
+    # Full OTA update.
+    if not source_zip:
+      tgt = common.GetUserImage(partition, OPTIONS.input_tmp, target_zip,
+                                info_dict=target_info,
+                                reset_file_map=True)
+      block_diff_dict[partition] = common.BlockDifference(partition, tgt,
+                                                          src=None)
+    # Incremental OTA update.
+    else:
+      block_diff_dict[partition] = GetIncrementalBlockDifferenceForPartition(
+          partition)
+  assert "system" in block_diff_dict
+
+  # Get the block diffs from the device specific script. If there is a
+  # duplicate block diff for a partition, ignore the diff in the generic script
+  # and use the one in the device specific script instead.
+  if source_zip:
+    device_specific_diffs = device_specific.IncrementalOTA_GetBlockDifferences()
+    function_name = "IncrementalOTA_GetBlockDifferences"
+  else:
+    device_specific_diffs = device_specific.FullOTA_GetBlockDifferences()
+    function_name = "FullOTA_GetBlockDifferences"
+
+  if device_specific_diffs:
+    assert all(isinstance(diff, common.BlockDifference)
+               for diff in device_specific_diffs), \
+        "{} is not returning a list of BlockDifference objects".format(
+            function_name)
+    for diff in device_specific_diffs:
+      if diff.partition in block_diff_dict:
+        logger.warning("Duplicate block difference found. Device specific block"
+                       " diff for partition '%s' overrides the one in generic"
+                       " script.", diff.partition)
+      block_diff_dict[diff.partition] = diff
+
+  return block_diff_dict
+
+
+def WriteFullOTAPackage(input_zip, output_file):
+  target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
+
+  # We don't know what version it will be installed on top of. We expect the API
+  # just won't change very often. Similarly for fstab, it might have changed in
+  # the target build.
+  target_api_version = target_info["recovery_api_version"]
+  script = edify_generator.EdifyGenerator(target_api_version, target_info)
+
+  if target_info.oem_props and not OPTIONS.oem_no_mount:
+    target_info.WriteMountOemScript(script)
+
+  metadata = GetPackageMetadata(target_info)
+
+  if not OPTIONS.no_signing:
+    staging_file = common.MakeTempFile(suffix='.zip')
+  else:
+    staging_file = output_file
+
+  output_zip = zipfile.ZipFile(
+      staging_file, "w", compression=zipfile.ZIP_DEFLATED)
+
+  device_specific = common.DeviceSpecificParams(
+      input_zip=input_zip,
+      input_version=target_api_version,
+      output_zip=output_zip,
+      script=script,
+      input_tmp=OPTIONS.input_tmp,
+      metadata=metadata,
+      info_dict=OPTIONS.info_dict)
+
+  assert HasRecoveryPatch(input_zip, info_dict=OPTIONS.info_dict)
+
+  # Assertions (e.g. downgrade check, device properties check).
+  ts = target_info.GetBuildProp("ro.build.date.utc")
+  ts_text = target_info.GetBuildProp("ro.build.date")
+  script.AssertOlderBuild(ts, ts_text)
+
+  target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
+  device_specific.FullOTA_Assertions()
+
+  block_diff_dict = GetBlockDifferences(target_zip=input_zip, source_zip=None,
+                                        target_info=target_info,
+                                        source_info=None,
+                                        device_specific=device_specific)
+
+  # Two-step package strategy (in chronological order, which is *not*
+  # the order in which the generated script has things):
+  #
+  # if stage is not "2/3" or "3/3":
+  #    write recovery image to boot partition
+  #    set stage to "2/3"
+  #    reboot to boot partition and restart recovery
+  # else if stage is "2/3":
+  #    write recovery image to recovery partition
+  #    set stage to "3/3"
+  #    reboot to recovery partition and restart recovery
+  # else:
+  #    (stage must be "3/3")
+  #    set stage to ""
+  #    do normal full package installation:
+  #       wipe and install system, boot image, etc.
+  #       set up system to update recovery partition on first boot
+  #    complete script normally
+  #    (allow recovery to mark itself finished and reboot)
+
+  recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
+                                         OPTIONS.input_tmp, "RECOVERY")
+  if OPTIONS.two_step:
+    if not target_info.get("multistage_support"):
+      assert False, "two-step packages not supported by this build"
+    fs = target_info["fstab"]["/misc"]
+    assert fs.fs_type.upper() == "EMMC", \
+        "two-step packages only supported on devices with EMMC /misc partitions"
+    bcb_dev = {"bcb_dev": fs.device}
+    common.ZipWriteStr(output_zip, "recovery.img", recovery_img.data)
+    script.AppendExtra("""
+if get_stage("%(bcb_dev)s") == "2/3" then
+""" % bcb_dev)
+
+    # Stage 2/3: Write recovery image to /recovery (currently running /boot).
+    script.Comment("Stage 2/3")
+    script.WriteRawImage("/recovery", "recovery.img")
+    script.AppendExtra("""
+set_stage("%(bcb_dev)s", "3/3");
+reboot_now("%(bcb_dev)s", "recovery");
+else if get_stage("%(bcb_dev)s") == "3/3" then
+""" % bcb_dev)
+
+    # Stage 3/3: Make changes.
+    script.Comment("Stage 3/3")
+
+  # Dump fingerprints
+  script.Print("Target: {}".format(target_info.fingerprint))
+
+  device_specific.FullOTA_InstallBegin()
+
+  # All other partitions as well as the data wipe use 10% of the progress, and
+  # the update of the system partition takes the remaining progress.
+  system_progress = 0.9 - (len(block_diff_dict) - 1) * 0.1
+  if OPTIONS.wipe_user_data:
+    system_progress -= 0.1
+  progress_dict = {partition: 0.1 for partition in block_diff_dict}
+  progress_dict["system"] = system_progress
+
+  if target_info.get('use_dynamic_partitions') == "true":
+    # Use empty source_info_dict to indicate that all partitions / groups must
+    # be re-added.
+    dynamic_partitions_diff = common.DynamicPartitionsDifference(
+        info_dict=OPTIONS.info_dict,
+        block_diffs=block_diff_dict.values(),
+        progress_dict=progress_dict)
+    dynamic_partitions_diff.WriteScript(script, output_zip,
+                                        write_verify_script=OPTIONS.verify)
+  else:
+    for block_diff in block_diff_dict.values():
+      block_diff.WriteScript(script, output_zip,
+                             progress=progress_dict.get(block_diff.partition),
+                             write_verify_script=OPTIONS.verify)
+
+  CheckVintfIfTrebleEnabled(OPTIONS.input_tmp, target_info)
+
+  boot_img = common.GetBootableImage(
+      "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
+  common.CheckSize(boot_img.data, "boot.img", target_info)
+  common.ZipWriteStr(output_zip, "boot.img", boot_img.data)
+
+  script.WriteRawImage("/boot", "boot.img")
+
+  script.ShowProgress(0.1, 10)
+  device_specific.FullOTA_InstallEnd()
+
+  if OPTIONS.extra_script is not None:
+    script.AppendExtra(OPTIONS.extra_script)
+
+  script.UnmountAll()
+
+  if OPTIONS.wipe_user_data:
+    script.ShowProgress(0.1, 10)
+    script.FormatPartition("/data")
+
+  if OPTIONS.two_step:
+    script.AppendExtra("""
+set_stage("%(bcb_dev)s", "");
+""" % bcb_dev)
+    script.AppendExtra("else\n")
+
+    # Stage 1/3: Nothing to verify for full OTA. Write recovery image to /boot.
+    script.Comment("Stage 1/3")
+    _WriteRecoveryImageToBoot(script, output_zip)
+
+    script.AppendExtra("""
+set_stage("%(bcb_dev)s", "2/3");
+reboot_now("%(bcb_dev)s", "");
+endif;
+endif;
+""" % bcb_dev)
+
+  script.SetProgress(1)
+  script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
+  metadata["ota-required-cache"] = str(script.required_cache)
+
+  # We haven't written the metadata entry, which will be done in
+  # FinalizeMetadata.
+  common.ZipClose(output_zip)
+
+  needed_property_files = (
+      NonAbOtaPropertyFiles(),
+  )
+  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
+
+
+def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
+  target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
+  source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
+
+  target_api_version = target_info["recovery_api_version"]
+  source_api_version = source_info["recovery_api_version"]
+  if source_api_version == 0:
+    logger.warning(
+        "Generating edify script for a source that can't install it.")
+
+  script = edify_generator.EdifyGenerator(
+      source_api_version, target_info, fstab=source_info["fstab"])
+
+  if target_info.oem_props or source_info.oem_props:
+    if not OPTIONS.oem_no_mount:
+      source_info.WriteMountOemScript(script)
+
+  metadata = GetPackageMetadata(target_info, source_info)
+
+  if not OPTIONS.no_signing:
+    staging_file = common.MakeTempFile(suffix='.zip')
+  else:
+    staging_file = output_file
+
+  output_zip = zipfile.ZipFile(
+      staging_file, "w", compression=zipfile.ZIP_DEFLATED)
+
+  device_specific = common.DeviceSpecificParams(
+      source_zip=source_zip,
+      source_version=source_api_version,
+      source_tmp=OPTIONS.source_tmp,
+      target_zip=target_zip,
+      target_version=target_api_version,
+      target_tmp=OPTIONS.target_tmp,
+      output_zip=output_zip,
+      script=script,
+      metadata=metadata,
+      info_dict=source_info)
+
+  source_boot = common.GetBootableImage(
+      "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT", source_info)
+  target_boot = common.GetBootableImage(
+      "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT", target_info)
+  updating_boot = (not OPTIONS.two_step and
+                   (source_boot.data != target_boot.data))
+
+  target_recovery = common.GetBootableImage(
+      "/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY")
+
+  block_diff_dict = GetBlockDifferences(target_zip=target_zip,
+                                        source_zip=source_zip,
+                                        target_info=target_info,
+                                        source_info=source_info,
+                                        device_specific=device_specific)
+
+  CheckVintfIfTrebleEnabled(OPTIONS.target_tmp, target_info)
+
+  # Assertions (e.g. device properties check).
+  target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
+  device_specific.IncrementalOTA_Assertions()
+
+  # Two-step incremental package strategy (in chronological order,
+  # which is *not* the order in which the generated script has
+  # things):
+  #
+  # if stage is not "2/3" or "3/3":
+  #    do verification on current system
+  #    write recovery image to boot partition
+  #    set stage to "2/3"
+  #    reboot to boot partition and restart recovery
+  # else if stage is "2/3":
+  #    write recovery image to recovery partition
+  #    set stage to "3/3"
+  #    reboot to recovery partition and restart recovery
+  # else:
+  #    (stage must be "3/3")
+  #    perform update:
+  #       patch system files, etc.
+  #       force full install of new boot image
+  #       set up system to update recovery partition on first boot
+  #    complete script normally
+  #    (allow recovery to mark itself finished and reboot)
+
+  if OPTIONS.two_step:
+    if not source_info.get("multistage_support"):
+      assert False, "two-step packages not supported by this build"
+    fs = source_info["fstab"]["/misc"]
+    assert fs.fs_type.upper() == "EMMC", \
+        "two-step packages only supported on devices with EMMC /misc partitions"
+    bcb_dev = {"bcb_dev": fs.device}
+    common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
+    script.AppendExtra("""
+if get_stage("%(bcb_dev)s") == "2/3" then
+""" % bcb_dev)
+
+    # Stage 2/3: Write recovery image to /recovery (currently running /boot).
+    script.Comment("Stage 2/3")
+    script.AppendExtra("sleep(20);\n")
+    script.WriteRawImage("/recovery", "recovery.img")
+    script.AppendExtra("""
+set_stage("%(bcb_dev)s", "3/3");
+reboot_now("%(bcb_dev)s", "recovery");
+else if get_stage("%(bcb_dev)s") != "3/3" then
+""" % bcb_dev)
+
+    # Stage 1/3: (a) Verify the current system.
+    script.Comment("Stage 1/3")
+
+  # Dump fingerprints
+  script.Print("Source: {}".format(source_info.fingerprint))
+  script.Print("Target: {}".format(target_info.fingerprint))
+
+  script.Print("Verifying current system...")
+
+  device_specific.IncrementalOTA_VerifyBegin()
+
+  WriteFingerprintAssertion(script, target_info, source_info)
+
+  # Check the required cache size (i.e. stashed blocks).
+  required_cache_sizes = [diff.required_cache for diff in
+                          block_diff_dict.values()]
+  if updating_boot:
+    boot_type, boot_device_expr = common.GetTypeAndDeviceExpr("/boot",
+                                                              source_info)
+    d = common.Difference(target_boot, source_boot)
+    _, _, d = d.ComputePatch()
+    if d is None:
+      include_full_boot = True
+      common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
+    else:
+      include_full_boot = False
+
+      logger.info(
+          "boot      target: %d  source: %d  diff: %d", target_boot.size,
+          source_boot.size, len(d))
+
+      common.ZipWriteStr(output_zip, "boot.img.p", d)
+
+      target_expr = 'concat("{}:",{},":{}:{}")'.format(
+          boot_type, boot_device_expr, target_boot.size, target_boot.sha1)
+      source_expr = 'concat("{}:",{},":{}:{}")'.format(
+          boot_type, boot_device_expr, source_boot.size, source_boot.sha1)
+      script.PatchPartitionExprCheck(target_expr, source_expr)
+
+      required_cache_sizes.append(target_boot.size)
+
+  if required_cache_sizes:
+    script.CacheFreeSpaceCheck(max(required_cache_sizes))
+
+  # Verify the existing partitions.
+  for diff in block_diff_dict.values():
+    diff.WriteVerifyScript(script, touched_blocks_only=True)
+
+  device_specific.IncrementalOTA_VerifyEnd()
+
+  if OPTIONS.two_step:
+    # Stage 1/3: (b) Write recovery image to /boot.
+    _WriteRecoveryImageToBoot(script, output_zip)
+
+    script.AppendExtra("""
+set_stage("%(bcb_dev)s", "2/3");
+reboot_now("%(bcb_dev)s", "");
+else
+""" % bcb_dev)
+
+    # Stage 3/3: Make changes.
+    script.Comment("Stage 3/3")
+
+  script.Comment("---- start making changes here ----")
+
+  device_specific.IncrementalOTA_InstallBegin()
+
+  progress_dict = {partition: 0.1 for partition in block_diff_dict}
+  progress_dict["system"] = 1 - len(block_diff_dict) * 0.1
+
+  if OPTIONS.source_info_dict.get("use_dynamic_partitions") == "true":
+    if OPTIONS.target_info_dict.get("use_dynamic_partitions") != "true":
+      raise RuntimeError(
+          "can't generate incremental that disables dynamic partitions")
+    dynamic_partitions_diff = common.DynamicPartitionsDifference(
+        info_dict=OPTIONS.target_info_dict,
+        source_info_dict=OPTIONS.source_info_dict,
+        block_diffs=block_diff_dict.values(),
+        progress_dict=progress_dict)
+    dynamic_partitions_diff.WriteScript(
+        script, output_zip, write_verify_script=OPTIONS.verify)
+  else:
+    for block_diff in block_diff_dict.values():
+      block_diff.WriteScript(script, output_zip,
+                             progress=progress_dict.get(block_diff.partition),
+                             write_verify_script=OPTIONS.verify)
+
+  if OPTIONS.two_step:
+    common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
+    script.WriteRawImage("/boot", "boot.img")
+    logger.info("writing full boot image (forced by two-step mode)")
+
+  if not OPTIONS.two_step:
+    if updating_boot:
+      if include_full_boot:
+        logger.info("boot image changed; including full.")
+        script.Print("Installing boot image...")
+        script.WriteRawImage("/boot", "boot.img")
+      else:
+        # Produce the boot image by applying a patch to the current
+        # contents of the boot partition, and write it back to the
+        # partition.
+        logger.info("boot image changed; including patch.")
+        script.Print("Patching boot image...")
+        script.ShowProgress(0.1, 10)
+        target_expr = 'concat("{}:",{},":{}:{}")'.format(
+            boot_type, boot_device_expr, target_boot.size, target_boot.sha1)
+        source_expr = 'concat("{}:",{},":{}:{}")'.format(
+            boot_type, boot_device_expr, source_boot.size, source_boot.sha1)
+        script.PatchPartitionExpr(target_expr, source_expr, '"boot.img.p"')
+    else:
+      logger.info("boot image unchanged; skipping.")
+
+  # Do device-specific installation (eg, write radio image).
+  device_specific.IncrementalOTA_InstallEnd()
+
+  if OPTIONS.extra_script is not None:
+    script.AppendExtra(OPTIONS.extra_script)
+
+  if OPTIONS.wipe_user_data:
+    script.Print("Erasing user data...")
+    script.FormatPartition("/data")
+
+  if OPTIONS.two_step:
+    script.AppendExtra("""
+set_stage("%(bcb_dev)s", "");
+endif;
+endif;
+""" % bcb_dev)
+
+  script.SetProgress(1)
+  # For downgrade OTAs, we prefer to use the update-binary in the source
+  # build that is actually newer than the one in the target build.
+  if OPTIONS.downgrade:
+    script.AddToZip(source_zip, output_zip, input_path=OPTIONS.updater_binary)
+  else:
+    script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
+  metadata["ota-required-cache"] = str(script.required_cache)
+
+  # We haven't written the metadata entry yet, which will be handled in
+  # FinalizeMetadata().
+  common.ZipClose(output_zip)
+
+  # Sign the generated zip package unless no_signing is specified.
+  needed_property_files = (
+      NonAbOtaPropertyFiles(),
+  )
+  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
+
+
+def GenerateNonAbOtaPackage(target_file, output_file, source_file=None):
+  """Generates a non-A/B OTA package."""
+  # Check the loaded info dicts first.
+  if OPTIONS.info_dict.get("no_recovery") == "true":
+    raise common.ExternalError(
+        "--- target build has specified no recovery ---")
+
+  # Non-A/B OTAs rely on /cache partition to store temporary files.
+  cache_size = OPTIONS.info_dict.get("cache_size")
+  if cache_size is None:
+    logger.warning("--- can't determine the cache partition size ---")
+  OPTIONS.cache_size = cache_size
+
+  if OPTIONS.extra_script is not None:
+    with open(OPTIONS.extra_script) as fp:
+      OPTIONS.extra_script = fp.read()
+
+  if OPTIONS.extracted_input is not None:
+    OPTIONS.input_tmp = OPTIONS.extracted_input
+  else:
+    logger.info("unzipping target target-files...")
+    OPTIONS.input_tmp = common.UnzipTemp(target_file, UNZIP_PATTERN)
+  OPTIONS.target_tmp = OPTIONS.input_tmp
+
+  # If the caller explicitly specified the device-specific extensions path via
+  # -s / --device_specific, use that. Otherwise, use META/releasetools.py if it
+  # is present in the target target_files. Otherwise, take the path of the file
+  # from 'tool_extensions' in the info dict and look for that in the local
+  # filesystem, relative to the current directory.
+  if OPTIONS.device_specific is None:
+    from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
+    if os.path.exists(from_input):
+      logger.info("(using device-specific extensions from target_files)")
+      OPTIONS.device_specific = from_input
+    else:
+      OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions")
+
+  if OPTIONS.device_specific is not None:
+    OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific)
+
+  # Generate a full OTA.
+  if source_file is None:
+    with zipfile.ZipFile(target_file) as input_zip:
+      WriteFullOTAPackage(
+          input_zip,
+          output_file)
+
+  # Generate an incremental OTA.
+  else:
+    logger.info("unzipping source target-files...")
+    OPTIONS.source_tmp = common.UnzipTemp(
+        OPTIONS.incremental_source, UNZIP_PATTERN)
+    with zipfile.ZipFile(target_file) as input_zip, \
+            zipfile.ZipFile(source_file) as source_zip:
+      WriteBlockIncrementalOTAPackage(
+          input_zip,
+          source_zip,
+          output_file)
+
+
+def WriteFingerprintAssertion(script, target_info, source_info):
+  source_oem_props = source_info.oem_props
+  target_oem_props = target_info.oem_props
+
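+  # Four cases follow: neither side uses OEM properties (compare plain
+  # fingerprints), both do (compare thumbprints), or exactly one does
+  # (accept either the fingerprint of the non-OEM side or the thumbprint
+  # of the OEM side).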
+  if source_oem_props is None and target_oem_props is None:
+    script.AssertSomeFingerprint(
+        source_info.fingerprint, target_info.fingerprint)
+  elif source_oem_props is not None and target_oem_props is not None:
+    script.AssertSomeThumbprint(
+        target_info.GetBuildProp("ro.build.thumbprint"),
+        source_info.GetBuildProp("ro.build.thumbprint"))
+  elif source_oem_props is None and target_oem_props is not None:
+    script.AssertFingerprintOrThumbprint(
+        source_info.fingerprint,
+        target_info.GetBuildProp("ro.build.thumbprint"))
+  else:
+    script.AssertFingerprintOrThumbprint(
+        target_info.fingerprint,
+        source_info.GetBuildProp("ro.build.thumbprint"))
+
+
+class NonAbOtaPropertyFiles(PropertyFiles):
+  """The property-files for non-A/B OTA.
+
+  For non-A/B OTA, the property-files string contains the info for the METADATA
+  entry, with which a system updater can fetch the package metadata prior to
+  downloading the entire package.
+  """
+
+  def __init__(self):
+    super(NonAbOtaPropertyFiles, self).__init__()
+    self.name = 'ota-property-files'
+
+
+def _WriteRecoveryImageToBoot(script, output_zip):
+  """Find and write recovery image to /boot in two-step OTA.
+
+  In two-step OTAs, we write recovery image to /boot as the first step so that
+  we can reboot to there and install a new recovery image to /recovery.
+  A special "recovery-two-step.img" will be preferred, which encodes the correct
+  path of "/boot". Otherwise the device may show "device is corrupt" message
+  when booting into /boot.
+
+  Fall back to using the regular recovery.img if the two-step recovery image
+  doesn't exist. Note that rebuilding the special image at this point may be
+  infeasible, because we don't have the desired boot signer and keys when
+  calling ota_from_target_files.py.
+  """
+
+  recovery_two_step_img_name = "recovery-two-step.img"
+  recovery_two_step_img_path = os.path.join(
+      OPTIONS.input_tmp, "OTA", recovery_two_step_img_name)
+  if os.path.exists(recovery_two_step_img_path):
+    common.ZipWrite(
+        output_zip,
+        recovery_two_step_img_path,
+        arcname=recovery_two_step_img_name)
+    logger.info(
+        "two-step package: using %s in stage 1/3", recovery_two_step_img_name)
+    script.WriteRawImage("/boot", recovery_two_step_img_name)
+  else:
+    logger.info("two-step package: using recovery.img in stage 1/3")
+    # The "recovery.img" entry has been written into package earlier.
+    script.WriteRawImage("/boot", "recovery.img")
+
+
+def HasRecoveryPatch(target_files_zip, info_dict):
+  board_uses_vendorimage = info_dict.get("board_uses_vendorimage") == "true"
+
+  if board_uses_vendorimage:
+    target_files_dir = "VENDOR"
+  else:
+    target_files_dir = "SYSTEM/vendor"
+
+  patch = "%s/recovery-from-boot.p" % target_files_dir
+  img = "%s/etc/recovery.img" % target_files_dir
+
+  namelist = target_files_zip.namelist()
+  return patch in namelist or img in namelist
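
With the non-A/B logic factored out into this module, ota_from_target_files.py only needs the single GenerateNonAbOtaPackage entry point (see the import added below). A hedged call sketch, assuming OPTIONS has already been populated the way ota_from_target_files.py does (the incremental path also reads OPTIONS.incremental_source), with file names as placeholders:

from non_ab_ota import GenerateNonAbOtaPackage

# Full OTA: only the target build is needed.
GenerateNonAbOtaPackage(target_file='target-files.zip', output_file='ota.zip')

# Incremental OTA: also pass the older build's target-files.
GenerateNonAbOtaPackage(target_file='target-files-new.zip',
                        output_file='incremental-ota.zip',
                        source_file='target-files-old.zip')
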
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index b70044e..f42974f 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -206,9 +206,6 @@
 
 from __future__ import print_function
 
-import collections
-import copy
-import itertools
 import logging
 import multiprocessing
 import os.path
@@ -218,12 +215,12 @@
 import sys
 import zipfile
 
-import check_target_files_vintf
 import common
-import edify_generator
 import target_files_diff
-import verity_utils
-
+from check_target_files_vintf import CheckVintfIfTrebleEnabled
+from non_ab_ota import GenerateNonAbOtaPackage
+from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
+                       PropertyFiles)
 
 if sys.hexversion < 0x02070000:
   print("Python 2.7 or newer is required.", file=sys.stderr)
@@ -270,11 +267,10 @@
 OPTIONS.boot_variable_file = None
 
 
-METADATA_NAME = 'META-INF/com/android/metadata'
 POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
 DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
 AB_PARTITIONS = 'META/ab_partitions.txt'
-UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
+
 # Files to be unzipped for target diffing purpose.
 TARGET_DIFFING_UNZIP_PATTERN = ['BOOT', 'RECOVERY', 'SYSTEM/*', 'VENDOR/*',
                                 'PRODUCT/*', 'SYSTEM_EXT/*', 'ODM/*',
@@ -488,13 +484,6 @@
                     compress_type=zipfile.ZIP_STORED)
 
 
-def SignOutput(temp_zip_name, output_zip_name):
-  pw = OPTIONS.key_passwords[OPTIONS.package_key]
-
-  common.SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw,
-                  whole_file=True)
-
-
 def _LoadOemDicts(oem_source):
   """Returns the list of loaded OEM properties dict."""
   if not oem_source:
@@ -507,658 +496,6 @@
   return oem_dicts
 
 
-def _WriteRecoveryImageToBoot(script, output_zip):
-  """Find and write recovery image to /boot in two-step OTA.
-
-  In two-step OTAs, we write recovery image to /boot as the first step so that
-  we can reboot to there and install a new recovery image to /recovery.
-  A special "recovery-two-step.img" will be preferred, which encodes the correct
-  path of "/boot". Otherwise the device may show "device is corrupt" message
-  when booting into /boot.
-
-  Fall back to using the regular recovery.img if the two-step recovery image
-  doesn't exist. Note that rebuilding the special image at this point may be
-  infeasible, because we don't have the desired boot signer and keys when
-  calling ota_from_target_files.py.
-  """
-
-  recovery_two_step_img_name = "recovery-two-step.img"
-  recovery_two_step_img_path = os.path.join(
-      OPTIONS.input_tmp, "OTA", recovery_two_step_img_name)
-  if os.path.exists(recovery_two_step_img_path):
-    common.ZipWrite(
-        output_zip,
-        recovery_two_step_img_path,
-        arcname=recovery_two_step_img_name)
-    logger.info(
-        "two-step package: using %s in stage 1/3", recovery_two_step_img_name)
-    script.WriteRawImage("/boot", recovery_two_step_img_name)
-  else:
-    logger.info("two-step package: using recovery.img in stage 1/3")
-    # The "recovery.img" entry has been written into package earlier.
-    script.WriteRawImage("/boot", "recovery.img")
-
-
-def HasRecoveryPatch(target_files_zip, info_dict):
-  board_uses_vendorimage = info_dict.get("board_uses_vendorimage") == "true"
-
-  if board_uses_vendorimage:
-    target_files_dir = "VENDOR"
-  else:
-    target_files_dir = "SYSTEM/vendor"
-
-  patch = "%s/recovery-from-boot.p" % target_files_dir
-  img = "%s/etc/recovery.img" % target_files_dir
-
-  namelist = target_files_zip.namelist()
-  return patch in namelist or img in namelist
-
-
-def HasPartition(target_files_zip, partition):
-  try:
-    target_files_zip.getinfo(partition.upper() + "/")
-    return True
-  except KeyError:
-    return False
-
-
-def HasTrebleEnabled(target_files, target_info):
-  def HasVendorPartition(target_files):
-    if os.path.isdir(target_files):
-      return os.path.isdir(os.path.join(target_files, "VENDOR"))
-    if zipfile.is_zipfile(target_files):
-      return HasPartition(zipfile.ZipFile(target_files), "vendor")
-    raise ValueError("Unknown target_files argument")
-
-  return (HasVendorPartition(target_files) and
-          target_info.GetBuildProp("ro.treble.enabled") == "true")
-
-
-def WriteFingerprintAssertion(script, target_info, source_info):
-  source_oem_props = source_info.oem_props
-  target_oem_props = target_info.oem_props
-
-  if source_oem_props is None and target_oem_props is None:
-    script.AssertSomeFingerprint(
-        source_info.fingerprint, target_info.fingerprint)
-  elif source_oem_props is not None and target_oem_props is not None:
-    script.AssertSomeThumbprint(
-        target_info.GetBuildProp("ro.build.thumbprint"),
-        source_info.GetBuildProp("ro.build.thumbprint"))
-  elif source_oem_props is None and target_oem_props is not None:
-    script.AssertFingerprintOrThumbprint(
-        source_info.fingerprint,
-        target_info.GetBuildProp("ro.build.thumbprint"))
-  else:
-    script.AssertFingerprintOrThumbprint(
-        target_info.fingerprint,
-        source_info.GetBuildProp("ro.build.thumbprint"))
-
-
-def CheckVintfIfTrebleEnabled(target_files, target_info):
-  """Checks compatibility info of the input target files.
-
-  Metadata used for compatibility verification is retrieved from the target files.
-
-  Compatibility should only be checked for devices that have enabled
-  Treble support.
-
-  Args:
-    target_files: Path to zip file containing the source files to be included
-        for OTA. Can also be the path to an extracted directory.
-    target_info: The BuildInfo instance that holds the target build info.
-  """
-
-  # Will only proceed if the target has enabled Treble support (as well as
-  # having a /vendor partition).
-  if not HasTrebleEnabled(target_files, target_info):
-    return
-
-  # Skip adding the compatibility package as a workaround for b/114240221. The
-  # compatibility will always fail on devices without qualified kernels.
-  if OPTIONS.skip_compatibility_check:
-    return
-
-  if not check_target_files_vintf.CheckVintf(target_files, target_info):
-    raise RuntimeError("VINTF compatibility check failed")
-
-
-def GetBlockDifferences(target_zip, source_zip, target_info, source_info,
-                        device_specific):
-  """Returns a ordered dict of block differences with partition name as key."""
-
-  def GetIncrementalBlockDifferenceForPartition(name):
-    if not HasPartition(source_zip, name):
-      raise RuntimeError(
-          "can't generate incremental that adds {}".format(name))
-
-    partition_src = common.GetUserImage(name, OPTIONS.source_tmp, source_zip,
-                                        info_dict=source_info,
-                                        allow_shared_blocks=allow_shared_blocks)
-
-    hashtree_info_generator = verity_utils.CreateHashtreeInfoGenerator(
-        name, 4096, target_info)
-    partition_tgt = common.GetUserImage(name, OPTIONS.target_tmp, target_zip,
-                                        info_dict=target_info,
-                                        allow_shared_blocks=allow_shared_blocks,
-                                        hashtree_info_generator=hashtree_info_generator)
-
-    # Check the first block of the source system partition for remount R/W only
-    # if the filesystem is ext4.
-    partition_source_info = source_info["fstab"]["/" + name]
-    check_first_block = partition_source_info.fs_type == "ext4"
-    # Disable using imgdiff for squashfs. 'imgdiff -z' expects input files to be
-    # in zip format. However, with squashfs, a) all files are compressed in LZ4;
-    # b) the blocks listed in block map may not contain all the bytes for a
-    # given file (because they're rounded to be 4K-aligned).
-    partition_target_info = target_info["fstab"]["/" + name]
-    disable_imgdiff = (partition_source_info.fs_type == "squashfs" or
-                       partition_target_info.fs_type == "squashfs")
-    return common.BlockDifference(name, partition_tgt, partition_src,
-                                  check_first_block,
-                                  version=blockimgdiff_version,
-                                  disable_imgdiff=disable_imgdiff)
-
-  if source_zip:
-    # See notes in common.GetUserImage()
-    allow_shared_blocks = (source_info.get('ext4_share_dup_blocks') == "true" or
-                           target_info.get('ext4_share_dup_blocks') == "true")
-    blockimgdiff_version = max(
-        int(i) for i in target_info.get(
-            "blockimgdiff_versions", "1").split(","))
-    assert blockimgdiff_version >= 3
-
-  block_diff_dict = collections.OrderedDict()
-  partition_names = ["system", "vendor", "product", "odm", "system_ext",
-                     "vendor_dlkm", "odm_dlkm"]
-  for partition in partition_names:
-    if not HasPartition(target_zip, partition):
-      continue
-    # Full OTA update.
-    if not source_zip:
-      tgt = common.GetUserImage(partition, OPTIONS.input_tmp, target_zip,
-                                info_dict=target_info,
-                                reset_file_map=True)
-      block_diff_dict[partition] = common.BlockDifference(partition, tgt,
-                                                          src=None)
-    # Incremental OTA update.
-    else:
-      block_diff_dict[partition] = GetIncrementalBlockDifferenceForPartition(
-          partition)
-  assert "system" in block_diff_dict
-
-  # Get the block diffs from the device specific script. If there is a
-  # duplicate block diff for a partition, ignore the diff in the generic script
-  # and use the one in the device specific script instead.
-  if source_zip:
-    device_specific_diffs = device_specific.IncrementalOTA_GetBlockDifferences()
-    function_name = "IncrementalOTA_GetBlockDifferences"
-  else:
-    device_specific_diffs = device_specific.FullOTA_GetBlockDifferences()
-    function_name = "FullOTA_GetBlockDifferences"
-
-  if device_specific_diffs:
-    assert all(isinstance(diff, common.BlockDifference)
-               for diff in device_specific_diffs), \
-        "{} is not returning a list of BlockDifference objects".format(
-            function_name)
-    for diff in device_specific_diffs:
-      if diff.partition in block_diff_dict:
-        logger.warning("Duplicate block difference found. Device specific block"
-                       " diff for partition '%s' overrides the one in generic"
-                       " script.", diff.partition)
-      block_diff_dict[diff.partition] = diff
-
-  return block_diff_dict
-
-
-def WriteFullOTAPackage(input_zip, output_file):
-  target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
-
-  # We don't know what version it will be installed on top of. We expect the API
-  # just won't change very often. Similarly for fstab, it might have changed in
-  # the target build.
-  target_api_version = target_info["recovery_api_version"]
-  script = edify_generator.EdifyGenerator(target_api_version, target_info)
-
-  if target_info.oem_props and not OPTIONS.oem_no_mount:
-    target_info.WriteMountOemScript(script)
-
-  metadata = GetPackageMetadata(target_info)
-
-  if not OPTIONS.no_signing:
-    staging_file = common.MakeTempFile(suffix='.zip')
-  else:
-    staging_file = output_file
-
-  output_zip = zipfile.ZipFile(
-      staging_file, "w", compression=zipfile.ZIP_DEFLATED)
-
-  device_specific = common.DeviceSpecificParams(
-      input_zip=input_zip,
-      input_version=target_api_version,
-      output_zip=output_zip,
-      script=script,
-      input_tmp=OPTIONS.input_tmp,
-      metadata=metadata,
-      info_dict=OPTIONS.info_dict)
-
-  assert HasRecoveryPatch(input_zip, info_dict=OPTIONS.info_dict)
-
-  # Assertions (e.g. downgrade check, device properties check).
-  ts = target_info.GetBuildProp("ro.build.date.utc")
-  ts_text = target_info.GetBuildProp("ro.build.date")
-  script.AssertOlderBuild(ts, ts_text)
-
-  target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
-  device_specific.FullOTA_Assertions()
-
-  block_diff_dict = GetBlockDifferences(target_zip=input_zip, source_zip=None,
-                                        target_info=target_info,
-                                        source_info=None,
-                                        device_specific=device_specific)
-
-  # Two-step package strategy (in chronological order, which is *not*
-  # the order in which the generated script has things):
-  #
-  # if stage is not "2/3" or "3/3":
-  #    write recovery image to boot partition
-  #    set stage to "2/3"
-  #    reboot to boot partition and restart recovery
-  # else if stage is "2/3":
-  #    write recovery image to recovery partition
-  #    set stage to "3/3"
-  #    reboot to recovery partition and restart recovery
-  # else:
-  #    (stage must be "3/3")
-  #    set stage to ""
-  #    do normal full package installation:
-  #       wipe and install system, boot image, etc.
-  #       set up system to update recovery partition on first boot
-  #    complete script normally
-  #    (allow recovery to mark itself finished and reboot)
-
-  recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
-                                         OPTIONS.input_tmp, "RECOVERY")
-  if OPTIONS.two_step:
-    if not target_info.get("multistage_support"):
-      assert False, "two-step packages not supported by this build"
-    fs = target_info["fstab"]["/misc"]
-    assert fs.fs_type.upper() == "EMMC", \
-        "two-step packages only supported on devices with EMMC /misc partitions"
-    bcb_dev = {"bcb_dev": fs.device}
-    common.ZipWriteStr(output_zip, "recovery.img", recovery_img.data)
-    script.AppendExtra("""
-if get_stage("%(bcb_dev)s") == "2/3" then
-""" % bcb_dev)
-
-    # Stage 2/3: Write recovery image to /recovery (currently running /boot).
-    script.Comment("Stage 2/3")
-    script.WriteRawImage("/recovery", "recovery.img")
-    script.AppendExtra("""
-set_stage("%(bcb_dev)s", "3/3");
-reboot_now("%(bcb_dev)s", "recovery");
-else if get_stage("%(bcb_dev)s") == "3/3" then
-""" % bcb_dev)
-
-    # Stage 3/3: Make changes.
-    script.Comment("Stage 3/3")
-
-  # Dump fingerprints
-  script.Print("Target: {}".format(target_info.fingerprint))
-
-  device_specific.FullOTA_InstallBegin()
-
-  # All other partitions as well as the data wipe use 10% of the progress, and
-  # the update of the system partition takes the remaining progress.
-  system_progress = 0.9 - (len(block_diff_dict) - 1) * 0.1
-  if OPTIONS.wipe_user_data:
-    system_progress -= 0.1
-  progress_dict = {partition: 0.1 for partition in block_diff_dict}
-  progress_dict["system"] = system_progress
-
-  if target_info.get('use_dynamic_partitions') == "true":
-    # Use empty source_info_dict to indicate that all partitions / groups must
-    # be re-added.
-    dynamic_partitions_diff = common.DynamicPartitionsDifference(
-        info_dict=OPTIONS.info_dict,
-        block_diffs=block_diff_dict.values(),
-        progress_dict=progress_dict)
-    dynamic_partitions_diff.WriteScript(script, output_zip,
-                                        write_verify_script=OPTIONS.verify)
-  else:
-    for block_diff in block_diff_dict.values():
-      block_diff.WriteScript(script, output_zip,
-                             progress=progress_dict.get(block_diff.partition),
-                             write_verify_script=OPTIONS.verify)
-
-  CheckVintfIfTrebleEnabled(OPTIONS.input_tmp, target_info)
-
-  boot_img = common.GetBootableImage(
-      "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
-  common.CheckSize(boot_img.data, "boot.img", target_info)
-  common.ZipWriteStr(output_zip, "boot.img", boot_img.data)
-
-  script.WriteRawImage("/boot", "boot.img")
-
-  script.ShowProgress(0.1, 10)
-  device_specific.FullOTA_InstallEnd()
-
-  if OPTIONS.extra_script is not None:
-    script.AppendExtra(OPTIONS.extra_script)
-
-  script.UnmountAll()
-
-  if OPTIONS.wipe_user_data:
-    script.ShowProgress(0.1, 10)
-    script.FormatPartition("/data")
-
-  if OPTIONS.two_step:
-    script.AppendExtra("""
-set_stage("%(bcb_dev)s", "");
-""" % bcb_dev)
-    script.AppendExtra("else\n")
-
-    # Stage 1/3: Nothing to verify for full OTA. Write recovery image to /boot.
-    script.Comment("Stage 1/3")
-    _WriteRecoveryImageToBoot(script, output_zip)
-
-    script.AppendExtra("""
-set_stage("%(bcb_dev)s", "2/3");
-reboot_now("%(bcb_dev)s", "");
-endif;
-endif;
-""" % bcb_dev)
-
-  script.SetProgress(1)
-  script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
-  metadata["ota-required-cache"] = str(script.required_cache)
-
-  # We haven't written the metadata entry, which will be done in
-  # FinalizeMetadata.
-  common.ZipClose(output_zip)
-
-  needed_property_files = (
-      NonAbOtaPropertyFiles(),
-  )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
-
-
-def WriteMetadata(metadata, output):
-  """Writes the metadata to the zip archive or a file.
-
-  Args:
-    metadata: The metadata dict for the package.
-    output: A ZipFile object or a string of the output file path.
-  """
-
-  value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])
-  if isinstance(output, zipfile.ZipFile):
-    common.ZipWriteStr(output, METADATA_NAME, value,
-                       compress_type=zipfile.ZIP_STORED)
-    return
-
-  with open(output, 'w') as f:
-    f.write(value)
-
-
-def HandleDowngradeMetadata(metadata, target_info, source_info):
-  # Only incremental OTAs are allowed to reach here.
-  assert OPTIONS.incremental_source is not None
-
-  post_timestamp = target_info.GetBuildProp("ro.build.date.utc")
-  pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
-  is_downgrade = int(post_timestamp) < int(pre_timestamp)
-
-  if OPTIONS.downgrade:
-    if not is_downgrade:
-      raise RuntimeError(
-          "--downgrade or --override_timestamp specified but no downgrade "
-          "detected: pre: %s, post: %s" % (pre_timestamp, post_timestamp))
-    metadata["ota-downgrade"] = "yes"
-  else:
-    if is_downgrade:
-      raise RuntimeError(
-          "Downgrade detected based on timestamp check: pre: %s, post: %s. "
-          "Need to specify --override_timestamp OR --downgrade to allow "
-          "building the incremental." % (pre_timestamp, post_timestamp))
-
-
-def GetPackageMetadata(target_info, source_info=None):
-  """Generates and returns the metadata dict.
-
-  It generates a dict() that contains the info to be written into an OTA
-  package (META-INF/com/android/metadata). It also handles the detection of
-  downgrade / data wipe based on the global options.
-
-  Args:
-    target_info: The BuildInfo instance that holds the target build info.
-    source_info: The BuildInfo instance that holds the source build info, or
-        None if generating full OTA.
-
-  Returns:
-    A dict to be written into package metadata entry.
-  """
-  assert isinstance(target_info, common.BuildInfo)
-  assert source_info is None or isinstance(source_info, common.BuildInfo)
-
-  separator = '|'
-
-  boot_variable_values = {}
-  if OPTIONS.boot_variable_file:
-    d = common.LoadDictionaryFromFile(OPTIONS.boot_variable_file)
-    for key, values in d.items():
-      boot_variable_values[key] = [val.strip() for val in values.split(',')]
-
-  post_build_devices, post_build_fingerprints = \
-      CalculateRuntimeDevicesAndFingerprints(target_info, boot_variable_values)
-  metadata = {
-      'post-build': separator.join(sorted(post_build_fingerprints)),
-      'post-build-incremental': target_info.GetBuildProp(
-          'ro.build.version.incremental'),
-      'post-sdk-level': target_info.GetBuildProp(
-          'ro.build.version.sdk'),
-      'post-security-patch-level': target_info.GetBuildProp(
-          'ro.build.version.security_patch'),
-  }
-
-  if target_info.is_ab and not OPTIONS.force_non_ab:
-    metadata['ota-type'] = 'AB'
-    metadata['ota-required-cache'] = '0'
-  else:
-    metadata['ota-type'] = 'BLOCK'
-
-  if OPTIONS.wipe_user_data:
-    metadata['ota-wipe'] = 'yes'
-
-  if OPTIONS.retrofit_dynamic_partitions:
-    metadata['ota-retrofit-dynamic-partitions'] = 'yes'
-
-  is_incremental = source_info is not None
-  if is_incremental:
-    pre_build_devices, pre_build_fingerprints = \
-        CalculateRuntimeDevicesAndFingerprints(source_info,
-                                               boot_variable_values)
-    metadata['pre-build'] = separator.join(sorted(pre_build_fingerprints))
-    metadata['pre-build-incremental'] = source_info.GetBuildProp(
-        'ro.build.version.incremental')
-    metadata['pre-device'] = separator.join(sorted(pre_build_devices))
-  else:
-    metadata['pre-device'] = separator.join(sorted(post_build_devices))
-
-  # Use the actual post-timestamp, even for a downgrade case.
-  metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
-
-  # Detect downgrades and set up downgrade flags accordingly.
-  if is_incremental:
-    HandleDowngradeMetadata(metadata, target_info, source_info)
-
-  return metadata
-
-
-class PropertyFiles(object):
-  """A class that computes the property-files string for an OTA package.
-
-  A property-files string is a comma-separated string that contains the
-  offset/size info for an OTA package. The entries, which must be ZIP_STORED,
-  can be fetched directly with the package URL along with the offset/size info.
-  These strings can be used for streaming A/B OTAs, or allowing an updater to
-  download package metadata entry directly, without paying the cost of
-  downloading entire package.
-
-  Computing the final property-files string requires two passes. Because doing
-  the whole package signing (with signapk.jar) will possibly reorder the ZIP
-  entries, which may in turn invalidate earlier computed ZIP entry offset/size
-  values.
-
-  This class provides functions to be called for each pass. The general flow is
-  as follows.
-
-    property_files = PropertyFiles()
-    # The first pass, which writes placeholders before doing initial signing.
-    property_files.Compute()
-    SignOutput()
-
-    # The second pass, by replacing the placeholders with actual data.
-    property_files.Finalize()
-    SignOutput()
-
-  And the caller can additionally verify the final result.
-
-    property_files.Verify()
-  """
-
-  def __init__(self):
-    self.name = None
-    self.required = ()
-    self.optional = ()
-
-  def Compute(self, input_zip):
-    """Computes and returns a property-files string with placeholders.
-
-    We reserve extra space for the offset and size of the metadata entry itself,
-    although we don't know the final values until the package gets signed.
-
-    Args:
-      input_zip: The input ZIP file.
-
-    Returns:
-      A string with placeholders for the metadata offset/size info, e.g.
-      "payload.bin:679:343,payload_properties.txt:378:45,metadata:        ".
-    """
-    return self.GetPropertyFilesString(input_zip, reserve_space=True)
-
-  class InsufficientSpaceException(Exception):
-    pass
-
-  def Finalize(self, input_zip, reserved_length):
-    """Finalizes a property-files string with actual METADATA offset/size info.
-
-    The input ZIP file has been signed, with the ZIP entries in the desired
-    place (signapk.jar will possibly reorder the ZIP entries). Now we compute
-    the ZIP entry offsets and construct the property-files string with actual
-    data. Note that during this process, we must pad the property-files string
-    to the reserved length, so that the METADATA entry size remains the same.
-    Otherwise the entries' offsets and sizes may change again.
-
-    Args:
-      input_zip: The input ZIP file.
-      reserved_length: The reserved length of the property-files string during
-          the call to Compute(). The final string must be no more than this
-          size.
-
-    Returns:
-      A property-files string including the metadata offset/size info, e.g.
-      "payload.bin:679:343,payload_properties.txt:378:45,metadata:69:379  ".
-
-    Raises:
-      InsufficientSpaceException: If the reserved length is insufficient to hold
-          the final string.
-    """
-    result = self.GetPropertyFilesString(input_zip, reserve_space=False)
-    if len(result) > reserved_length:
-      raise self.InsufficientSpaceException(
-          'Insufficient reserved space: reserved={}, actual={}'.format(
-              reserved_length, len(result)))
-
-    result += ' ' * (reserved_length - len(result))
-    return result
-
-  def Verify(self, input_zip, expected):
-    """Verifies the input ZIP file contains the expected property-files string.
-
-    Args:
-      input_zip: The input ZIP file.
-      expected: The property-files string that's computed from Finalize().
-
-    Raises:
-      AssertionError: On finding a mismatch.
-    """
-    actual = self.GetPropertyFilesString(input_zip)
-    assert actual == expected, \
-        "Mismatching streaming metadata: {} vs {}.".format(actual, expected)
-
-  def GetPropertyFilesString(self, zip_file, reserve_space=False):
-    """
-    Constructs the property-files string per request.
-
-    Args:
-      zip_file: The input ZIP file.
-      reserved_length: The reserved length of the property-files string.
-
-    Returns:
-      A property-files string including the metadata offset/size info, e.g.
-      "payload.bin:679:343,payload_properties.txt:378:45,metadata:     ".
-    """
-
-    def ComputeEntryOffsetSize(name):
-      """Computes the zip entry offset and size."""
-      info = zip_file.getinfo(name)
-      offset = info.header_offset
-      offset += zipfile.sizeFileHeader
-      offset += len(info.extra) + len(info.filename)
-      size = info.file_size
-      return '%s:%d:%d' % (os.path.basename(name), offset, size)
-
-    tokens = []
-    tokens.extend(self._GetPrecomputed(zip_file))
-    for entry in self.required:
-      tokens.append(ComputeEntryOffsetSize(entry))
-    for entry in self.optional:
-      if entry in zip_file.namelist():
-        tokens.append(ComputeEntryOffsetSize(entry))
-
-    # 'META-INF/com/android/metadata' is required. We don't know its actual
-    # offset and length (as well as the values for other entries). So we reserve
-    # 15-byte as a placeholder ('offset:length'), which is sufficient to cover
-    # the space for metadata entry. Because 'offset' allows a max of 10-digit
-    # (i.e. ~9 GiB), with a max of 4-digit for the length. Note that all the
-    # reserved space serves the metadata entry only.
-    if reserve_space:
-      tokens.append('metadata:' + ' ' * 15)
-    else:
-      tokens.append(ComputeEntryOffsetSize(METADATA_NAME))
-
-    return ','.join(tokens)
-
-  def _GetPrecomputed(self, input_zip):
-    """Computes the additional tokens to be included into the property-files.
-
-    This applies to tokens without actual ZIP entries, such as
-    payload_metadadata.bin. We want to expose the offset/size to updaters, so
-    that they can download the payload metadata directly with the info.
-
-    Args:
-      input_zip: The input zip file.
-
-    Returns:
-      A list of strings (tokens) to be added to the property-files string.
-    """
-    # pylint: disable=no-self-use
-    # pylint: disable=unused-argument
-    return []
-
-
 class StreamingPropertyFiles(PropertyFiles):
   """A subclass for computing the property-files for streaming A/B OTAs."""
 
@@ -1264,362 +601,6 @@
     return (payload_offset, metadata_total)
 
 
-class NonAbOtaPropertyFiles(PropertyFiles):
-  """The property-files for non-A/B OTA.
-
-  For non-A/B OTA, the property-files string contains the info for METADATA
-  entry, with which a system updater can be fetched the package metadata prior
-  to downloading the entire package.
-  """
-
-  def __init__(self):
-    super(NonAbOtaPropertyFiles, self).__init__()
-    self.name = 'ota-property-files'
-
-
-def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
-  """Finalizes the metadata and signs an A/B OTA package.
-
-  In order to stream an A/B OTA package, we need 'ota-streaming-property-files'
-  that contains the offsets and sizes for the ZIP entries. An example
-  property-files string is as follows.
-
-    "payload.bin:679:343,payload_properties.txt:378:45,metadata:69:379"
-
-  OTA server can pass down this string, in addition to the package URL, to the
-  system update client. System update client can then fetch individual ZIP
-  entries (ZIP_STORED) directly at the given offset of the URL.
-
-  Args:
-    metadata: The metadata dict for the package.
-    input_file: The input ZIP filename that doesn't contain the package METADATA
-        entry yet.
-    output_file: The final output ZIP filename.
-    needed_property_files: The list of PropertyFiles' to be generated.
-  """
-
-  def ComputeAllPropertyFiles(input_file, needed_property_files):
-    # Write the current metadata entry with placeholders.
-    with zipfile.ZipFile(input_file) as input_zip:
-      for property_files in needed_property_files:
-        metadata[property_files.name] = property_files.Compute(input_zip)
-      namelist = input_zip.namelist()
-
-    if METADATA_NAME in namelist:
-      common.ZipDelete(input_file, METADATA_NAME)
-    output_zip = zipfile.ZipFile(input_file, 'a')
-    WriteMetadata(metadata, output_zip)
-    common.ZipClose(output_zip)
-
-    if OPTIONS.no_signing:
-      return input_file
-
-    prelim_signing = common.MakeTempFile(suffix='.zip')
-    SignOutput(input_file, prelim_signing)
-    return prelim_signing
-
-  def FinalizeAllPropertyFiles(prelim_signing, needed_property_files):
-    with zipfile.ZipFile(prelim_signing) as prelim_signing_zip:
-      for property_files in needed_property_files:
-        metadata[property_files.name] = property_files.Finalize(
-            prelim_signing_zip, len(metadata[property_files.name]))
-
-  # SignOutput(), which in turn calls signapk.jar, will possibly reorder the ZIP
-  # entries, as well as padding the entry headers. We do a preliminary signing
-  # (with an incomplete metadata entry) to allow that to happen. Then compute
-  # the ZIP entry offsets, write back the final metadata and do the final
-  # signing.
-  prelim_signing = ComputeAllPropertyFiles(input_file, needed_property_files)
-  try:
-    FinalizeAllPropertyFiles(prelim_signing, needed_property_files)
-  except PropertyFiles.InsufficientSpaceException:
-    # Even with the preliminary signing, the entry orders may change
-    # dramatically, which leads to insufficiently reserved space during the
-    # first call to ComputeAllPropertyFiles(). In that case, we redo all the
-    # preliminary signing works, based on the already ordered ZIP entries, to
-    # address the issue.
-    prelim_signing = ComputeAllPropertyFiles(
-        prelim_signing, needed_property_files)
-    FinalizeAllPropertyFiles(prelim_signing, needed_property_files)
-
-  # Replace the METADATA entry.
-  common.ZipDelete(prelim_signing, METADATA_NAME)
-  output_zip = zipfile.ZipFile(prelim_signing, 'a')
-  WriteMetadata(metadata, output_zip)
-  common.ZipClose(output_zip)
-
-  # Re-sign the package after updating the metadata entry.
-  if OPTIONS.no_signing:
-    output_file = prelim_signing
-  else:
-    SignOutput(prelim_signing, output_file)
-
-  # Reopen the final signed zip to double check the streaming metadata.
-  with zipfile.ZipFile(output_file) as output_zip:
-    for property_files in needed_property_files:
-      property_files.Verify(output_zip, metadata[property_files.name].strip())
-
-  # If requested, dump the metadata to a separate file.
-  output_metadata_path = OPTIONS.output_metadata_path
-  if output_metadata_path:
-    WriteMetadata(metadata, output_metadata_path)
-
-
-def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
-  target_info = common.BuildInfo(OPTIONS.target_info_dict, OPTIONS.oem_dicts)
-  source_info = common.BuildInfo(OPTIONS.source_info_dict, OPTIONS.oem_dicts)
-
-  target_api_version = target_info["recovery_api_version"]
-  source_api_version = source_info["recovery_api_version"]
-  if source_api_version == 0:
-    logger.warning(
-        "Generating edify script for a source that can't install it.")
-
-  script = edify_generator.EdifyGenerator(
-      source_api_version, target_info, fstab=source_info["fstab"])
-
-  if target_info.oem_props or source_info.oem_props:
-    if not OPTIONS.oem_no_mount:
-      source_info.WriteMountOemScript(script)
-
-  metadata = GetPackageMetadata(target_info, source_info)
-
-  if not OPTIONS.no_signing:
-    staging_file = common.MakeTempFile(suffix='.zip')
-  else:
-    staging_file = output_file
-
-  output_zip = zipfile.ZipFile(
-      staging_file, "w", compression=zipfile.ZIP_DEFLATED)
-
-  device_specific = common.DeviceSpecificParams(
-      source_zip=source_zip,
-      source_version=source_api_version,
-      source_tmp=OPTIONS.source_tmp,
-      target_zip=target_zip,
-      target_version=target_api_version,
-      target_tmp=OPTIONS.target_tmp,
-      output_zip=output_zip,
-      script=script,
-      metadata=metadata,
-      info_dict=source_info)
-
-  source_boot = common.GetBootableImage(
-      "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT", source_info)
-  target_boot = common.GetBootableImage(
-      "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT", target_info)
-  updating_boot = (not OPTIONS.two_step and
-                   (source_boot.data != target_boot.data))
-
-  target_recovery = common.GetBootableImage(
-      "/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY")
-
-  block_diff_dict = GetBlockDifferences(target_zip=target_zip,
-                                        source_zip=source_zip,
-                                        target_info=target_info,
-                                        source_info=source_info,
-                                        device_specific=device_specific)
-
-  CheckVintfIfTrebleEnabled(OPTIONS.target_tmp, target_info)
-
-  # Assertions (e.g. device properties check).
-  target_info.WriteDeviceAssertions(script, OPTIONS.oem_no_mount)
-  device_specific.IncrementalOTA_Assertions()
-
-  # Two-step incremental package strategy (in chronological order,
-  # which is *not* the order in which the generated script has
-  # things):
-  #
-  # if stage is not "2/3" or "3/3":
-  #    do verification on current system
-  #    write recovery image to boot partition
-  #    set stage to "2/3"
-  #    reboot to boot partition and restart recovery
-  # else if stage is "2/3":
-  #    write recovery image to recovery partition
-  #    set stage to "3/3"
-  #    reboot to recovery partition and restart recovery
-  # else:
-  #    (stage must be "3/3")
-  #    perform update:
-  #       patch system files, etc.
-  #       force full install of new boot image
-  #       set up system to update recovery partition on first boot
-  #    complete script normally
-  #    (allow recovery to mark itself finished and reboot)
-
-  if OPTIONS.two_step:
-    if not source_info.get("multistage_support"):
-      assert False, "two-step packages not supported by this build"
-    fs = source_info["fstab"]["/misc"]
-    assert fs.fs_type.upper() == "EMMC", \
-        "two-step packages only supported on devices with EMMC /misc partitions"
-    bcb_dev = {"bcb_dev": fs.device}
-    common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
-    script.AppendExtra("""
-if get_stage("%(bcb_dev)s") == "2/3" then
-""" % bcb_dev)
-
-    # Stage 2/3: Write recovery image to /recovery (currently running /boot).
-    script.Comment("Stage 2/3")
-    script.AppendExtra("sleep(20);\n")
-    script.WriteRawImage("/recovery", "recovery.img")
-    script.AppendExtra("""
-set_stage("%(bcb_dev)s", "3/3");
-reboot_now("%(bcb_dev)s", "recovery");
-else if get_stage("%(bcb_dev)s") != "3/3" then
-""" % bcb_dev)
-
-    # Stage 1/3: (a) Verify the current system.
-    script.Comment("Stage 1/3")
-
-  # Dump fingerprints
-  script.Print("Source: {}".format(source_info.fingerprint))
-  script.Print("Target: {}".format(target_info.fingerprint))
-
-  script.Print("Verifying current system...")
-
-  device_specific.IncrementalOTA_VerifyBegin()
-
-  WriteFingerprintAssertion(script, target_info, source_info)
-
-  # Check the required cache size (i.e. stashed blocks).
-  required_cache_sizes = [diff.required_cache for diff in
-                          block_diff_dict.values()]
-  if updating_boot:
-    boot_type, boot_device_expr = common.GetTypeAndDeviceExpr("/boot",
-                                                              source_info)
-    d = common.Difference(target_boot, source_boot)
-    _, _, d = d.ComputePatch()
-    if d is None:
-      include_full_boot = True
-      common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
-    else:
-      include_full_boot = False
-
-      logger.info(
-          "boot      target: %d  source: %d  diff: %d", target_boot.size,
-          source_boot.size, len(d))
-
-      common.ZipWriteStr(output_zip, "boot.img.p", d)
-
-      target_expr = 'concat("{}:",{},":{}:{}")'.format(
-          boot_type, boot_device_expr, target_boot.size, target_boot.sha1)
-      source_expr = 'concat("{}:",{},":{}:{}")'.format(
-          boot_type, boot_device_expr, source_boot.size, source_boot.sha1)
-      script.PatchPartitionExprCheck(target_expr, source_expr)
-
-      required_cache_sizes.append(target_boot.size)
-
-  if required_cache_sizes:
-    script.CacheFreeSpaceCheck(max(required_cache_sizes))
-
-  # Verify the existing partitions.
-  for diff in block_diff_dict.values():
-    diff.WriteVerifyScript(script, touched_blocks_only=True)
-
-  device_specific.IncrementalOTA_VerifyEnd()
-
-  if OPTIONS.two_step:
-    # Stage 1/3: (b) Write recovery image to /boot.
-    _WriteRecoveryImageToBoot(script, output_zip)
-
-    script.AppendExtra("""
-set_stage("%(bcb_dev)s", "2/3");
-reboot_now("%(bcb_dev)s", "");
-else
-""" % bcb_dev)
-
-    # Stage 3/3: Make changes.
-    script.Comment("Stage 3/3")
-
-  script.Comment("---- start making changes here ----")
-
-  device_specific.IncrementalOTA_InstallBegin()
-
-  progress_dict = {partition: 0.1 for partition in block_diff_dict}
-  progress_dict["system"] = 1 - len(block_diff_dict) * 0.1
-
-  if OPTIONS.source_info_dict.get("use_dynamic_partitions") == "true":
-    if OPTIONS.target_info_dict.get("use_dynamic_partitions") != "true":
-      raise RuntimeError(
-          "can't generate incremental that disables dynamic partitions")
-    dynamic_partitions_diff = common.DynamicPartitionsDifference(
-        info_dict=OPTIONS.target_info_dict,
-        source_info_dict=OPTIONS.source_info_dict,
-        block_diffs=block_diff_dict.values(),
-        progress_dict=progress_dict)
-    dynamic_partitions_diff.WriteScript(
-        script, output_zip, write_verify_script=OPTIONS.verify)
-  else:
-    for block_diff in block_diff_dict.values():
-      block_diff.WriteScript(script, output_zip,
-                             progress=progress_dict.get(block_diff.partition),
-                             write_verify_script=OPTIONS.verify)
-
-  if OPTIONS.two_step:
-    common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
-    script.WriteRawImage("/boot", "boot.img")
-    logger.info("writing full boot image (forced by two-step mode)")
-
-  if not OPTIONS.two_step:
-    if updating_boot:
-      if include_full_boot:
-        logger.info("boot image changed; including full.")
-        script.Print("Installing boot image...")
-        script.WriteRawImage("/boot", "boot.img")
-      else:
-        # Produce the boot image by applying a patch to the current
-        # contents of the boot partition, and write it back to the
-        # partition.
-        logger.info("boot image changed; including patch.")
-        script.Print("Patching boot image...")
-        script.ShowProgress(0.1, 10)
-        target_expr = 'concat("{}:",{},":{}:{}")'.format(
-            boot_type, boot_device_expr, target_boot.size, target_boot.sha1)
-        source_expr = 'concat("{}:",{},":{}:{}")'.format(
-            boot_type, boot_device_expr, source_boot.size, source_boot.sha1)
-        script.PatchPartitionExpr(target_expr, source_expr, '"boot.img.p"')
-    else:
-      logger.info("boot image unchanged; skipping.")
-
-  # Do device-specific installation (eg, write radio image).
-  device_specific.IncrementalOTA_InstallEnd()
-
-  if OPTIONS.extra_script is not None:
-    script.AppendExtra(OPTIONS.extra_script)
-
-  if OPTIONS.wipe_user_data:
-    script.Print("Erasing user data...")
-    script.FormatPartition("/data")
-
-  if OPTIONS.two_step:
-    script.AppendExtra("""
-set_stage("%(bcb_dev)s", "");
-endif;
-endif;
-""" % bcb_dev)
-
-  script.SetProgress(1)
-  # For downgrade OTAs, we prefer to use the update-binary in the source
-  # build that is actually newer than the one in the target build.
-  if OPTIONS.downgrade:
-    script.AddToZip(source_zip, output_zip, input_path=OPTIONS.updater_binary)
-  else:
-    script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
-  metadata["ota-required-cache"] = str(script.required_cache)
-
-  # We haven't written the metadata entry yet, which will be handled in
-  # FinalizeMetadata().
-  common.ZipClose(output_zip)
-
-  # Sign the generated zip package unless no_signing is specified.
-  needed_property_files = (
-      NonAbOtaPropertyFiles(),
-  )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
-
-
 def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
   """Returns a target-files.zip file for generating secondary payload.
 
@@ -1938,104 +919,6 @@
   FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
 
 
-def GenerateNonAbOtaPackage(target_file, output_file, source_file=None):
-  """Generates a non-A/B OTA package."""
-  # Check the loaded info dicts first.
-  if OPTIONS.info_dict.get("no_recovery") == "true":
-    raise common.ExternalError(
-        "--- target build has specified no recovery ---")
-
-  # Non-A/B OTAs rely on /cache partition to store temporary files.
-  cache_size = OPTIONS.info_dict.get("cache_size")
-  if cache_size is None:
-    logger.warning("--- can't determine the cache partition size ---")
-  OPTIONS.cache_size = cache_size
-
-  if OPTIONS.extra_script is not None:
-    with open(OPTIONS.extra_script) as fp:
-      OPTIONS.extra_script = fp.read()
-
-  if OPTIONS.extracted_input is not None:
-    OPTIONS.input_tmp = OPTIONS.extracted_input
-  else:
-    logger.info("unzipping target target-files...")
-    OPTIONS.input_tmp = common.UnzipTemp(target_file, UNZIP_PATTERN)
-  OPTIONS.target_tmp = OPTIONS.input_tmp
-
-  # If the caller explicitly specified the device-specific extensions path via
-  # -s / --device_specific, use that. Otherwise, use META/releasetools.py if it
-  # is present in the target target_files. Otherwise, take the path of the file
-  # from 'tool_extensions' in the info dict and look for that in the local
-  # filesystem, relative to the current directory.
-  if OPTIONS.device_specific is None:
-    from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
-    if os.path.exists(from_input):
-      logger.info("(using device-specific extensions from target_files)")
-      OPTIONS.device_specific = from_input
-    else:
-      OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions")
-
-  if OPTIONS.device_specific is not None:
-    OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific)
-
-  # Generate a full OTA.
-  if source_file is None:
-    with zipfile.ZipFile(target_file) as input_zip:
-      WriteFullOTAPackage(
-          input_zip,
-          output_file)
-
-  # Generate an incremental OTA.
-  else:
-    logger.info("unzipping source target-files...")
-    OPTIONS.source_tmp = common.UnzipTemp(
-        OPTIONS.incremental_source, UNZIP_PATTERN)
-    with zipfile.ZipFile(target_file) as input_zip, \
-            zipfile.ZipFile(source_file) as source_zip:
-      WriteBlockIncrementalOTAPackage(
-          input_zip,
-          source_zip,
-          output_file)
-
-
-def CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values):
-  """Returns a tuple of sets for runtime devices and fingerprints"""
-
-  device_names = {build_info.device}
-  fingerprints = {build_info.fingerprint}
-
-  if not boot_variable_values:
-    return device_names, fingerprints
-
-  # Calculate all possible combinations of the values for the boot variables.
-  keys = boot_variable_values.keys()
-  value_list = boot_variable_values.values()
-  combinations = [dict(zip(keys, values))
-                  for values in itertools.product(*value_list)]
-  for placeholder_values in combinations:
-    # Reload the info_dict as some build properties may change their values
-    # based on the value of ro.boot* properties.
-    info_dict = copy.deepcopy(build_info.info_dict)
-    for partition in common.PARTITIONS_WITH_CARE_MAP:
-      partition_prop_key = "{}.build.prop".format(partition)
-      input_file = info_dict[partition_prop_key].input_file
-      if isinstance(input_file, zipfile.ZipFile):
-        with zipfile.ZipFile(input_file.filename) as input_zip:
-          info_dict[partition_prop_key] = \
-              common.PartitionBuildProps.FromInputFile(input_zip, partition,
-                                                       placeholder_values)
-      else:
-        info_dict[partition_prop_key] = \
-            common.PartitionBuildProps.FromInputFile(input_file, partition,
-                                                     placeholder_values)
-    info_dict["build.prop"] = info_dict["system.build.prop"]
-
-    new_build_info = common.BuildInfo(info_dict, build_info.oem_dicts)
-    device_names.add(new_build_info.device)
-    fingerprints.add(new_build_info.fingerprint)
-  return device_names, fingerprints
-
-
 def main(argv):
 
   def option_handler(o, a):
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
new file mode 100644
index 0000000..874ab95
--- /dev/null
+++ b/tools/releasetools/ota_utils.py
@@ -0,0 +1,433 @@
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import copy
+import itertools
+import os
+import zipfile
+
+from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
+                    ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
+                    SignFile, PARTITIONS_WITH_CARE_MAP, PartitionBuildProps)
+
+METADATA_NAME = 'META-INF/com/android/metadata'
+UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
+
+
+def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
+  """Finalizes the metadata and signs an A/B OTA package.
+
+  In order to stream an A/B OTA package, we need 'ota-streaming-property-files'
+  that contains the offsets and sizes for the ZIP entries. An example
+  property-files string is as follows.
+
+    "payload.bin:679:343,payload_properties.txt:378:45,metadata:69:379"
+
+  The OTA server can pass this string, in addition to the package URL, down
+  to the system update client, which can then fetch individual ZIP entries
+  (ZIP_STORED) directly at the given offsets within the package URL.
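+  For instance, "payload.bin:679:343" denotes an entry whose data spans 343
+  bytes starting at byte offset 679 within the package file.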
+
+  Args:
+    metadata: The metadata dict for the package.
+    input_file: The input ZIP filename that doesn't contain the package METADATA
+        entry yet.
+    output_file: The final output ZIP filename.
+    needed_property_files: The list of PropertyFiles objects to be generated.
+  """
+
+  def ComputeAllPropertyFiles(input_file, needed_property_files):
+    # Write the current metadata entry with placeholders.
+    with zipfile.ZipFile(input_file) as input_zip:
+      for property_files in needed_property_files:
+        metadata[property_files.name] = property_files.Compute(input_zip)
+      namelist = input_zip.namelist()
+
+    if METADATA_NAME in namelist:
+      ZipDelete(input_file, METADATA_NAME)
+    output_zip = zipfile.ZipFile(input_file, 'a')
+    WriteMetadata(metadata, output_zip)
+    ZipClose(output_zip)
+
+    if OPTIONS.no_signing:
+      return input_file
+
+    prelim_signing = MakeTempFile(suffix='.zip')
+    SignOutput(input_file, prelim_signing)
+    return prelim_signing
+
+  def FinalizeAllPropertyFiles(prelim_signing, needed_property_files):
+    with zipfile.ZipFile(prelim_signing) as prelim_signing_zip:
+      for property_files in needed_property_files:
+        metadata[property_files.name] = property_files.Finalize(
+            prelim_signing_zip, len(metadata[property_files.name]))
+
+  # SignOutput(), which in turn calls signapk.jar, will possibly reorder the ZIP
+  # entries, as well as padding the entry headers. We do a preliminary signing
+  # (with an incomplete metadata entry) to allow that to happen. Then compute
+  # the ZIP entry offsets, write back the final metadata and do the final
+  # signing.
+  prelim_signing = ComputeAllPropertyFiles(input_file, needed_property_files)
+  try:
+    FinalizeAllPropertyFiles(prelim_signing, needed_property_files)
+  except PropertyFiles.InsufficientSpaceException:
+    # Even with the preliminary signing, the entry orders may change
+    # dramatically, which leads to insufficiently reserved space during the
+    # first call to ComputeAllPropertyFiles(). In that case, we redo all the
+    # preliminary signing works, based on the already ordered ZIP entries, to
+    # address the issue.
+    prelim_signing = ComputeAllPropertyFiles(
+        prelim_signing, needed_property_files)
+    FinalizeAllPropertyFiles(prelim_signing, needed_property_files)
+
+  # Replace the METADATA entry.
+  ZipDelete(prelim_signing, METADATA_NAME)
+  output_zip = zipfile.ZipFile(prelim_signing, 'a')
+  WriteMetadata(metadata, output_zip)
+  ZipClose(output_zip)
+
+  # Re-sign the package after updating the metadata entry.
+  if OPTIONS.no_signing:
+    output_file = prelim_signing
+  else:
+    SignOutput(prelim_signing, output_file)
+
+  # Reopen the final signed zip to double check the streaming metadata.
+  with zipfile.ZipFile(output_file) as output_zip:
+    for property_files in needed_property_files:
+      property_files.Verify(output_zip, metadata[property_files.name].strip())
+
+  # If requested, dump the metadata to a separate file.
+  output_metadata_path = OPTIONS.output_metadata_path
+  if output_metadata_path:
+    WriteMetadata(metadata, output_metadata_path)
+
+
+def WriteMetadata(metadata, output):
+  """Writes the metadata to the zip archive or a file.
+
+  Args:
+    metadata: The metadata dict for the package.
+    output: A ZipFile object or a string of the output file path.
+  """
+
+  value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])
+  if isinstance(output, zipfile.ZipFile):
+    ZipWriteStr(output, METADATA_NAME, value,
+                compress_type=zipfile.ZIP_STORED)
+    return
+
+  with open(output, 'w') as f:
+    f.write(value)
+
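+# For example (hypothetical values), calling WriteMetadata with
+# {'ota-type': 'AB', 'post-timestamp': '1600000000'} serializes the dict as
+# sorted key=value lines:
+#
+#   ota-type=AB
+#   post-timestamp=1600000000
+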
+
+def GetPackageMetadata(target_info, source_info=None):
+  """Generates and returns the metadata dict.
+
+  It generates a dict() that contains the info to be written into an OTA
+  package (META-INF/com/android/metadata). It also handles the detection of
+  downgrade / data wipe based on the global options.
+
+  Args:
+    target_info: The BuildInfo instance that holds the target build info.
+    source_info: The BuildInfo instance that holds the source build info, or
+        None if generating full OTA.
+
+  Returns:
+    A dict to be written into package metadata entry.
+  """
+  assert isinstance(target_info, BuildInfo)
+  assert source_info is None or isinstance(source_info, BuildInfo)
+
+  separator = '|'
+
+  boot_variable_values = {}
+  if OPTIONS.boot_variable_file:
+    d = LoadDictionaryFromFile(OPTIONS.boot_variable_file)
+    for key, values in d.items():
+      boot_variable_values[key] = [val.strip() for val in values.split(',')]
+
+  post_build_devices, post_build_fingerprints = \
+      CalculateRuntimeDevicesAndFingerprints(target_info, boot_variable_values)
+  metadata = {
+      'post-build': separator.join(sorted(post_build_fingerprints)),
+      'post-build-incremental': target_info.GetBuildProp(
+          'ro.build.version.incremental'),
+      'post-sdk-level': target_info.GetBuildProp(
+          'ro.build.version.sdk'),
+      'post-security-patch-level': target_info.GetBuildProp(
+          'ro.build.version.security_patch'),
+  }
+
+  if target_info.is_ab and not OPTIONS.force_non_ab:
+    metadata['ota-type'] = 'AB'
+    metadata['ota-required-cache'] = '0'
+  else:
+    metadata['ota-type'] = 'BLOCK'
+
+  if OPTIONS.wipe_user_data:
+    metadata['ota-wipe'] = 'yes'
+
+  if OPTIONS.retrofit_dynamic_partitions:
+    metadata['ota-retrofit-dynamic-partitions'] = 'yes'
+
+  is_incremental = source_info is not None
+  if is_incremental:
+    pre_build_devices, pre_build_fingerprints = \
+        CalculateRuntimeDevicesAndFingerprints(source_info,
+                                               boot_variable_values)
+    metadata['pre-build'] = separator.join(sorted(pre_build_fingerprints))
+    metadata['pre-build-incremental'] = source_info.GetBuildProp(
+        'ro.build.version.incremental')
+    metadata['pre-device'] = separator.join(sorted(pre_build_devices))
+  else:
+    metadata['pre-device'] = separator.join(sorted(post_build_devices))
+
+  # Use the actual post-timestamp, even for a downgrade case.
+  metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
+
+  # Detect downgrades and set up downgrade flags accordingly.
+  if is_incremental:
+    HandleDowngradeMetadata(metadata, target_info, source_info)
+
+  return metadata
+
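+# A resulting dict for a full A/B OTA might look like this (all values are
+# illustrative):
+#
+#   {'ota-type': 'AB', 'ota-required-cache': '0',
+#    'post-build': 'brand/product/device:11/RP1A/123456:user/release-keys',
+#    'post-build-incremental': '123456', 'post-sdk-level': '30',
+#    'post-security-patch-level': '2020-09-05', 'pre-device': 'device',
+#    'post-timestamp': '1600000000'}
+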
+
+def HandleDowngradeMetadata(metadata, target_info, source_info):
+  # Only incremental OTAs are allowed to reach here.
+  assert OPTIONS.incremental_source is not None
+
+  post_timestamp = target_info.GetBuildProp("ro.build.date.utc")
+  pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
+  is_downgrade = int(post_timestamp) < int(pre_timestamp)
+
+  if OPTIONS.downgrade:
+    if not is_downgrade:
+      raise RuntimeError(
+          "--downgrade or --override_timestamp specified but no downgrade "
+          "detected: pre: %s, post: %s" % (pre_timestamp, post_timestamp))
+    metadata["ota-downgrade"] = "yes"
+  else:
+    if is_downgrade:
+      raise RuntimeError(
+          "Downgrade detected based on timestamp check: pre: %s, post: %s. "
+          "Need to specify --override_timestamp OR --downgrade to allow "
+          "building the incremental." % (pre_timestamp, post_timestamp))
+
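+# Example (illustrative timestamps): pre=1600000000 with post=1500000000 is
+# detected as a downgrade, so the incremental can only be built by passing
+# --downgrade (which also marks the package with ota-downgrade=yes) or
+# --override_timestamp.
+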
+
+def CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values):
+  """Returns a tuple of sets for runtime devices and fingerprints"""
+
+  device_names = {build_info.device}
+  fingerprints = {build_info.fingerprint}
+
+  if not boot_variable_values:
+    return device_names, fingerprints
+
+  # Calculate all possible combinations of the values for the boot variables.
+  keys = boot_variable_values.keys()
+  value_list = boot_variable_values.values()
+  combinations = [dict(zip(keys, values))
+                  for values in itertools.product(*value_list)]
+  for placeholder_values in combinations:
+    # Reload the info_dict as some build properties may change their values
+    # based on the value of ro.boot* properties.
+    info_dict = copy.deepcopy(build_info.info_dict)
+    for partition in PARTITIONS_WITH_CARE_MAP:
+      partition_prop_key = "{}.build.prop".format(partition)
+      input_file = info_dict[partition_prop_key].input_file
+      if isinstance(input_file, zipfile.ZipFile):
+        with zipfile.ZipFile(input_file.filename) as input_zip:
+          info_dict[partition_prop_key] = \
+              PartitionBuildProps.FromInputFile(input_zip, partition,
+                                                placeholder_values)
+      else:
+        info_dict[partition_prop_key] = \
+            PartitionBuildProps.FromInputFile(input_file, partition,
+                                              placeholder_values)
+    info_dict["build.prop"] = info_dict["system.build.prop"]
+
+    new_build_info = BuildInfo(info_dict, build_info.oem_dicts)
+    device_names.add(new_build_info.device)
+    fingerprints.add(new_build_info.fingerprint)
+  return device_names, fingerprints
+
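+# For example (hypothetical variable), a boot_variable_values of
+# {'ro.boot.sku': ['sku1', 'sku2']} reloads the build props once per value,
+# so the returned sets may contain a distinct device name and fingerprint
+# per SKU in addition to the defaults.
+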
+
+class PropertyFiles(object):
+  """A class that computes the property-files string for an OTA package.
+
+  A property-files string is a comma-separated string that contains the
+  offset/size info for an OTA package. The entries, which must be ZIP_STORED,
+  can be fetched directly with the package URL along with the offset/size info.
+  These strings can be used for streaming A/B OTAs, or for allowing an updater
+  to download the package metadata entry directly, without paying the cost of
+  downloading the entire package.
+
+  Computing the final property-files string requires two passes, because
+  signing the whole package (with signapk.jar) may reorder the ZIP entries,
+  which in turn invalidates previously computed ZIP entry offset/size values.
+
+  This class provides functions to be called for each pass. The general flow is
+  as follows.
+
+    property_files = PropertyFiles()
+    # The first pass, which writes placeholders before doing initial signing.
+    property_files.Compute()
+    SignOutput()
+
+    # The second pass, which replaces the placeholders with actual data.
+    property_files.Finalize()
+    SignOutput()
+
+  And the caller can additionally verify the final result.
+
+    property_files.Verify()
+  """
+
+  def __init__(self):
+    self.name = None
+    self.required = ()
+    self.optional = ()
+
+  def Compute(self, input_zip):
+    """Computes and returns a property-files string with placeholders.
+
+    We reserve extra space for the offset and size of the metadata entry itself,
+    although we don't know the final values until the package gets signed.
+
+    Args:
+      input_zip: The input ZIP file.
+
+    Returns:
+      A string with placeholders for the metadata offset/size info, e.g.
+      "payload.bin:679:343,payload_properties.txt:378:45,metadata:        ".
+    """
+    return self.GetPropertyFilesString(input_zip, reserve_space=True)
+
+  class InsufficientSpaceException(Exception):
+    pass
+
+  def Finalize(self, input_zip, reserved_length):
+    """Finalizes a property-files string with actual METADATA offset/size info.
+
+    The input ZIP file has been signed, with the ZIP entries in the desired
+    place (signapk.jar will possibly reorder the ZIP entries). Now we compute
+    the ZIP entry offsets and construct the property-files string with actual
+    data. Note that during this process, we must pad the property-files string
+    to the reserved length, so that the METADATA entry size remains the same.
+    Otherwise the entries' offsets and sizes may change again.
+
+    Args:
+      input_zip: The input ZIP file.
+      reserved_length: The reserved length of the property-files string during
+          the call to Compute(). The final string must be no more than this
+          size.
+
+    Returns:
+      A property-files string including the metadata offset/size info, e.g.
+      "payload.bin:679:343,payload_properties.txt:378:45,metadata:69:379  ".
+
+    Raises:
+      InsufficientSpaceException: If the reserved length is insufficient to hold
+          the final string.
+    """
+    result = self.GetPropertyFilesString(input_zip, reserve_space=False)
+    if len(result) > reserved_length:
+      raise self.InsufficientSpaceException(
+          'Insufficient reserved space: reserved={}, actual={}'.format(
+              reserved_length, len(result)))
+
+    result += ' ' * (reserved_length - len(result))
+    return result
+
+  def Verify(self, input_zip, expected):
+    """Verifies the input ZIP file contains the expected property-files string.
+
+    Args:
+      input_zip: The input ZIP file.
+      expected: The property-files string that's computed from Finalize().
+
+    Raises:
+      AssertionError: On finding a mismatch.
+    """
+    actual = self.GetPropertyFilesString(input_zip)
+    assert actual == expected, \
+        "Mismatching streaming metadata: {} vs {}.".format(actual, expected)
+
+  def GetPropertyFilesString(self, zip_file, reserve_space=False):
+    """
+    Constructs the property-files string per request.
+
+    Args:
+      zip_file: The input ZIP file.
+      reserved_length: The reserved length of the property-files string.
+
+    Returns:
+      A property-files string including the metadata offset/size info, e.g.
+      "payload.bin:679:343,payload_properties.txt:378:45,metadata:     ".
+    """
+
+    def ComputeEntryOffsetSize(name):
+      """Computes the zip entry offset and size."""
+      info = zip_file.getinfo(name)
+      offset = info.header_offset
+      offset += zipfile.sizeFileHeader
+      offset += len(info.extra) + len(info.filename)
+      size = info.file_size
+      return '%s:%d:%d' % (os.path.basename(name), offset, size)
+
+    tokens = []
+    tokens.extend(self._GetPrecomputed(zip_file))
+    for entry in self.required:
+      tokens.append(ComputeEntryOffsetSize(entry))
+    for entry in self.optional:
+      if entry in zip_file.namelist():
+        tokens.append(ComputeEntryOffsetSize(entry))
+
+    # 'META-INF/com/android/metadata' is required, but we don't know its
+    # actual offset and length (nor the values for other entries) yet. So we
+    # reserve 15 bytes as a placeholder ('offset:length'): up to 10 digits
+    # for the offset (i.e. ~9 GiB), a colon, and up to 4 digits for the
+    # length. Note that all the reserved space serves the metadata entry
+    # only.
+    if reserve_space:
+      tokens.append('metadata:' + ' ' * 15)
+    else:
+      tokens.append(ComputeEntryOffsetSize(METADATA_NAME))
+
+    return ','.join(tokens)
+
+  def _GetPrecomputed(self, input_zip):
+    """Computes the additional tokens to be included into the property-files.
+
+    This applies to tokens without actual ZIP entries, such as
+    payload_metadata.bin. We want to expose the offset/size to updaters, so
+    that they can download the payload metadata directly with the info.
+
+    Args:
+      input_zip: The input zip file.
+
+    Returns:
+      A list of strings (tokens) to be added to the property-files string.
+    """
+    # pylint: disable=no-self-use
+    # pylint: disable=unused-argument
+    return []
+
+
+def SignOutput(temp_zip_name, output_zip_name):
+  pw = OPTIONS.key_passwords[OPTIONS.package_key]
+
+  SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw,
+           whole_file=True)
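+
+
+# A minimal usage sketch (illustrative; the zip paths are hypothetical). A
+# caller builds the metadata dict for the target build, then finalizes and
+# signs the package in one step, passing a PropertyFiles subclass such as
+# NonAbOtaPropertyFiles:
+#
+#   target_info = BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
+#   metadata = GetPackageMetadata(target_info)
+#   FinalizeMetadata(metadata, 'unsigned-ota.zip', 'ota.zip',
+#                    (NonAbOtaPropertyFiles(),))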
diff --git a/tools/releasetools/test_non_ab_ota.py b/tools/releasetools/test_non_ab_ota.py
new file mode 100644
index 0000000..ee1b411
--- /dev/null
+++ b/tools/releasetools/test_non_ab_ota.py
@@ -0,0 +1,169 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import copy
+import zipfile
+
+import common
+import test_utils
+
+from non_ab_ota import NonAbOtaPropertyFiles, WriteFingerprintAssertion
+from test_utils import PropertyFilesTestCase
+
+
+class NonAbOtaPropertyFilesTest(PropertyFilesTestCase):
+  """Additional validity checks specialized for NonAbOtaPropertyFiles."""
+
+  def setUp(self):
+    common.OPTIONS.no_signing = False
+
+  def test_init(self):
+    property_files = NonAbOtaPropertyFiles()
+    self.assertEqual('ota-property-files', property_files.name)
+    self.assertEqual((), property_files.required)
+    self.assertEqual((), property_files.optional)
+
+  def test_Compute(self):
+    entries = ()
+    zip_file = self.construct_zip_package(entries)
+    property_files = NonAbOtaPropertyFiles()
+    with zipfile.ZipFile(zip_file) as zip_fp:
+      property_files_string = property_files.Compute(zip_fp)
+
+    tokens = self._parse_property_files_string(property_files_string)
+    self.assertEqual(1, len(tokens))
+    self._verify_entries(zip_file, tokens, entries)
+
+  def test_Finalize(self):
+    entries = [
+        'META-INF/com/android/metadata',
+    ]
+    zip_file = self.construct_zip_package(entries)
+    property_files = NonAbOtaPropertyFiles()
+    with zipfile.ZipFile(zip_file) as zip_fp:
+      raw_metadata = property_files.GetPropertyFilesString(
+          zip_fp, reserve_space=False)
+      property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
+    tokens = self._parse_property_files_string(property_files_string)
+
+    self.assertEqual(1, len(tokens))
+    # 'META-INF/com/android/metadata' will be keyed as 'metadata'.
+    entries[0] = 'metadata'
+    self._verify_entries(zip_file, tokens, entries)
+
+  def test_Verify(self):
+    entries = (
+        'META-INF/com/android/metadata',
+    )
+    zip_file = self.construct_zip_package(entries)
+    property_files = NonAbOtaPropertyFiles()
+    with zipfile.ZipFile(zip_file) as zip_fp:
+      raw_metadata = property_files.GetPropertyFilesString(
+          zip_fp, reserve_space=False)
+
+      property_files.Verify(zip_fp, raw_metadata)
+
+
+class NonAbOTATest(test_utils.ReleaseToolsTestCase):
+  TEST_TARGET_INFO_DICT = {
+      'build.prop': common.PartitionBuildProps.FromDictionary(
+          'system', {
+              'ro.product.device': 'product-device',
+              'ro.build.fingerprint': 'build-fingerprint-target',
+              'ro.build.version.incremental': 'build-version-incremental-target',
+              'ro.build.version.sdk': '27',
+              'ro.build.version.security_patch': '2017-12-01',
+              'ro.build.date.utc': '1500000000'}
+      )
+  }
+  TEST_INFO_DICT_USES_OEM_PROPS = {
+      'build.prop': common.PartitionBuildProps.FromDictionary(
+          'system', {
+              'ro.product.name': 'product-name',
+              'ro.build.thumbprint': 'build-thumbprint',
+              'ro.build.bar': 'build-bar'}
+      ),
+      'vendor.build.prop': common.PartitionBuildProps.FromDictionary(
+          'vendor', {
+               'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'}
+      ),
+      'property1': 'value1',
+      'property2': 4096,
+      'oem_fingerprint_properties': 'ro.product.device ro.product.brand',
+  }
+  TEST_OEM_DICTS = [
+      {
+          'ro.product.brand': 'brand1',
+          'ro.product.device': 'device1',
+      },
+      {
+          'ro.product.brand': 'brand2',
+          'ro.product.device': 'device2',
+      },
+      {
+          'ro.product.brand': 'brand3',
+          'ro.product.device': 'device3',
+      },
+  ]
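+
+  # Context (illustrative, not part of the change): when an info dict
+  # defines 'oem_fingerprint_properties', BuildInfo resolves the listed
+  # properties from the OEM dicts and identifies the build by
+  # ro.build.thumbprint rather than ro.build.fingerprint; the tests below
+  # exercise each combination of OEM-backed source and target.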
+  def test_WriteFingerprintAssertion_without_oem_props(self):
+    target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    source_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
+    source_info_dict['build.prop'].build_props['ro.build.fingerprint'] = (
+        'source-build-fingerprint')
+    source_info = common.BuildInfo(source_info_dict, None)
+
+    script_writer = test_utils.MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertSomeFingerprint', 'source-build-fingerprint',
+          'build-fingerprint-target')],
+        script_writer.lines)
+
+  def test_WriteFingerprintAssertion_with_source_oem_props(self):
+    target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+    source_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                                   self.TEST_OEM_DICTS)
+
+    script_writer = test_utils.MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertFingerprintOrThumbprint', 'build-fingerprint-target',
+          'build-thumbprint')],
+        script_writer.lines)
+
+  def test_WriteFingerprintAssertion_with_target_oem_props(self):
+    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                                   self.TEST_OEM_DICTS)
+    source_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
+
+    script_writer = test_utils.MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertFingerprintOrThumbprint', 'build-fingerprint-target',
+          'build-thumbprint')],
+        script_writer.lines)
+
+  def test_WriteFingerprintAssertion_with_both_oem_props(self):
+    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
+                                   self.TEST_OEM_DICTS)
+    source_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
+    source_info_dict['build.prop'].build_props['ro.build.thumbprint'] = (
+        'source-build-thumbprint')
+    source_info = common.BuildInfo(source_info_dict, self.TEST_OEM_DICTS)
+
+    script_writer = test_utils.MockScriptWriter()
+    WriteFingerprintAssertion(script_writer, target_info, source_info)
+    self.assertEqual(
+        [('AssertSomeThumbprint', 'build-thumbprint',
+          'source-build-thumbprint')],
+        script_writer.lines)
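+
+
+# A hypothetical stand-in (illustration only; the real MockScriptWriter
+# lives in test_utils) for the recording behaviour the assertions above
+# rely on: each scripted assertion is captured on .lines as a
+# (method, *args) tuple that the tests compare against literally.
+class _RecordingScriptWriter(object):
+
+  def __init__(self):
+    self.lines = []
+
+  def AssertSomeFingerprint(self, *fingerprints):
+    self.lines.append(('AssertSomeFingerprint',) + fingerprints)
+
+  def AssertSomeThumbprint(self, *thumbprints):
+    self.lines.append(('AssertSomeThumbprint',) + thumbprints)
+
+  def AssertFingerprintOrThumbprint(self, fingerprint, thumbprint):
+    self.lines.append(
+        ('AssertFingerprintOrThumbprint', fingerprint, thumbprint))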
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 07b2e05..52aa487 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -21,14 +21,15 @@
 
 import common
 import test_utils
+from ota_utils import CalculateRuntimeDevicesAndFingerprints
 from ota_from_target_files import (
     _LoadOemDicts, AbOtaPropertyFiles, FinalizeMetadata,
     GetPackageMetadata, GetTargetFilesZipForSecondaryImages,
-    GetTargetFilesZipWithoutPostinstallConfig, NonAbOtaPropertyFiles,
+    GetTargetFilesZipWithoutPostinstallConfig,
     Payload, PayloadSigner, POSTINSTALL_CONFIG, PropertyFiles,
-    StreamingPropertyFiles, WriteFingerprintAssertion,
-    CalculateRuntimeDevicesAndFingerprints)
-
+    StreamingPropertyFiles)
+from non_ab_ota import NonAbOtaPropertyFiles
+from test_utils import PropertyFilesTestCase
+
 
 def construct_target_files(secondary=False):
   """Returns a target-files.zip file for generating OTA packages."""
@@ -149,20 +150,6 @@
       'oem_fingerprint_properties': 'ro.product.device ro.product.brand',
   }
 
-  TEST_OEM_DICTS = [
-      {
-          'ro.product.brand': 'brand1',
-          'ro.product.device': 'device1',
-      },
-      {
-          'ro.product.brand': 'brand2',
-          'ro.product.device': 'device2',
-      },
-      {
-          'ro.product.brand': 'brand3',
-          'ro.product.device': 'device3',
-      },
-  ]
 
   def setUp(self):
     self.testdata_dir = test_utils.get_testdata_dir()
@@ -529,59 +516,6 @@
     FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
     self.assertIn('ota-test-property-files', metadata)
 
-  def test_WriteFingerprintAssertion_without_oem_props(self):
-    target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
-    source_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
-    source_info_dict['build.prop'].build_props['ro.build.fingerprint'] = (
-        'source-build-fingerprint')
-    source_info = common.BuildInfo(source_info_dict, None)
-
-    script_writer = test_utils.MockScriptWriter()
-    WriteFingerprintAssertion(script_writer, target_info, source_info)
-    self.assertEqual(
-        [('AssertSomeFingerprint', 'source-build-fingerprint',
-          'build-fingerprint-target')],
-        script_writer.lines)
-
-  def test_WriteFingerprintAssertion_with_source_oem_props(self):
-    target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
-    source_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
-                                   self.TEST_OEM_DICTS)
-
-    script_writer = test_utils.MockScriptWriter()
-    WriteFingerprintAssertion(script_writer, target_info, source_info)
-    self.assertEqual(
-        [('AssertFingerprintOrThumbprint', 'build-fingerprint-target',
-          'build-thumbprint')],
-        script_writer.lines)
-
-  def test_WriteFingerprintAssertion_with_target_oem_props(self):
-    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
-                                   self.TEST_OEM_DICTS)
-    source_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
-
-    script_writer = test_utils.MockScriptWriter()
-    WriteFingerprintAssertion(script_writer, target_info, source_info)
-    self.assertEqual(
-        [('AssertFingerprintOrThumbprint', 'build-fingerprint-target',
-          'build-thumbprint')],
-        script_writer.lines)
-
-  def test_WriteFingerprintAssertion_with_both_oem_props(self):
-    target_info = common.BuildInfo(self.TEST_INFO_DICT_USES_OEM_PROPS,
-                                   self.TEST_OEM_DICTS)
-    source_info_dict = copy.deepcopy(self.TEST_INFO_DICT_USES_OEM_PROPS)
-    source_info_dict['build.prop'].build_props['ro.build.thumbprint'] = (
-        'source-build-thumbprint')
-    source_info = common.BuildInfo(source_info_dict, self.TEST_OEM_DICTS)
-
-    script_writer = test_utils.MockScriptWriter()
-    WriteFingerprintAssertion(script_writer, target_info, source_info)
-    self.assertEqual(
-        [('AssertSomeThumbprint', 'build-thumbprint',
-          'source-build-thumbprint')],
-        script_writer.lines)
-
 
 class TestPropertyFiles(PropertyFiles):
   """A class that extends PropertyFiles for testing purpose."""
@@ -598,41 +532,8 @@
         'optional-entry2',
     )
 
+class PropertyFilesTest(PropertyFilesTestCase):
 
-class PropertyFilesTest(test_utils.ReleaseToolsTestCase):
-
-  def setUp(self):
-    common.OPTIONS.no_signing = False
-
-  @staticmethod
-  def construct_zip_package(entries):
-    zip_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(zip_file, 'w') as zip_fp:
-      for entry in entries:
-        zip_fp.writestr(
-            entry,
-            entry.replace('.', '-').upper(),
-            zipfile.ZIP_STORED)
-    return zip_file
-
-  @staticmethod
-  def _parse_property_files_string(data):
-    result = {}
-    for token in data.split(','):
-      name, info = token.split(':', 1)
-      result[name] = info
-    return result
-
-  def _verify_entries(self, input_file, tokens, entries):
-    for entry in entries:
-      offset, size = map(int, tokens[entry].split(':'))
-      with open(input_file, 'rb') as input_fp:
-        input_fp.seek(offset)
-        if entry == 'metadata':
-          expected = b'META-INF/COM/ANDROID/METADATA'
-        else:
-          expected = entry.replace('.', '-').upper().encode()
-        self.assertEqual(expected, input_fp.read(size))
 
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_Compute(self):
@@ -753,7 +654,7 @@
           AssertionError, property_files.Verify, zip_fp, raw_metadata + 'x')
 
 
-class StreamingPropertyFilesTest(PropertyFilesTest):
+class StreamingPropertyFilesTest(PropertyFilesTestCase):
   """Additional validity checks specialized for StreamingPropertyFiles."""
 
   def test_init(self):
@@ -834,7 +735,7 @@
           AssertionError, property_files.Verify, zip_fp, raw_metadata + 'x')
 
 
-class AbOtaPropertyFilesTest(PropertyFilesTest):
+class AbOtaPropertyFilesTest(PropertyFilesTestCase):
   """Additional validity checks specialized for AbOtaPropertyFiles."""
 
   # The size for payload and metadata signature size.
@@ -1002,56 +903,6 @@
       property_files.Verify(zip_fp, raw_metadata)
 
 
-class NonAbOtaPropertyFilesTest(PropertyFilesTest):
-  """Additional validity checks specialized for NonAbOtaPropertyFiles."""
-
-  def test_init(self):
-    property_files = NonAbOtaPropertyFiles()
-    self.assertEqual('ota-property-files', property_files.name)
-    self.assertEqual((), property_files.required)
-    self.assertEqual((), property_files.optional)
-
-  def test_Compute(self):
-    entries = ()
-    zip_file = self.construct_zip_package(entries)
-    property_files = NonAbOtaPropertyFiles()
-    with zipfile.ZipFile(zip_file) as zip_fp:
-      property_files_string = property_files.Compute(zip_fp)
-
-    tokens = self._parse_property_files_string(property_files_string)
-    self.assertEqual(1, len(tokens))
-    self._verify_entries(zip_file, tokens, entries)
-
-  def test_Finalize(self):
-    entries = [
-        'META-INF/com/android/metadata',
-    ]
-    zip_file = self.construct_zip_package(entries)
-    property_files = NonAbOtaPropertyFiles()
-    with zipfile.ZipFile(zip_file) as zip_fp:
-      raw_metadata = property_files.GetPropertyFilesString(
-          zip_fp, reserve_space=False)
-      property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
-    tokens = self._parse_property_files_string(property_files_string)
-
-    self.assertEqual(1, len(tokens))
-    # 'META-INF/com/android/metadata' will be key'd as 'metadata'.
-    entries[0] = 'metadata'
-    self._verify_entries(zip_file, tokens, entries)
-
-  def test_Verify(self):
-    entries = (
-        'META-INF/com/android/metadata',
-    )
-    zip_file = self.construct_zip_package(entries)
-    property_files = NonAbOtaPropertyFiles()
-    with zipfile.ZipFile(zip_file) as zip_fp:
-      raw_metadata = property_files.GetPropertyFilesString(
-          zip_fp, reserve_space=False)
-
-      property_files.Verify(zip_fp, raw_metadata)
-
-
 class PayloadSignerTest(test_utils.ReleaseToolsTestCase):
 
   SIGFILE = 'sigfile.bin'
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index e999757..65092d8 100755
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -25,6 +25,7 @@
 import struct
 import sys
 import unittest
+import zipfile
 
 import common
 
@@ -192,6 +193,41 @@
   def tearDown(self):
     common.Cleanup()
 
+
+class PropertyFilesTestCase(ReleaseToolsTestCase):
+  """Base class with shared helpers for the PropertyFiles test classes."""
+
+  def setUp(self):
+    common.OPTIONS.no_signing = False
+
+  @staticmethod
+  def construct_zip_package(entries):
+    zip_file = common.MakeTempFile(suffix='.zip')
+    with zipfile.ZipFile(zip_file, 'w') as zip_fp:
+      for entry in entries:
+        zip_fp.writestr(
+            entry,
+            entry.replace('.', '-').upper(),
+            zipfile.ZIP_STORED)
+    return zip_file
+
+  @staticmethod
+  def _parse_property_files_string(data):
+    result = {}
+    for token in data.split(','):
+      name, info = token.split(':', 1)
+      result[name] = info
+    return result
+
+  def _verify_entries(self, input_file, tokens, entries):
+    for entry in entries:
+      offset, size = map(int, tokens[entry].split(':'))
+      with open(input_file, 'rb') as input_fp:
+        input_fp.seek(offset)
+        if entry == 'metadata':
+          expected = b'META-INF/COM/ANDROID/METADATA'
+        else:
+          expected = entry.replace('.', '-').upper().encode()
+        self.assertEqual(expected, input_fp.read(size))
+
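+
+# Hypothetical usage sketch (illustration only, not a real test case):
+# construct_zip_package() stores each entry's name, upper-cased with '.'
+# replaced by '-', as that entry's payload, so a caller can locate the
+# payload in the raw zip and hand _verify_entries() tokens in the
+# 'offset:size' form it expects.
+class _PropertyFilesTestCaseExample(PropertyFilesTestCase):
+
+  def test_verify_entries_sketch(self):
+    entries = ['required-entry1']
+    zip_file = self.construct_zip_package(entries)
+    with open(zip_file, 'rb') as fp:
+      data = fp.read()
+    payload = b'REQUIRED-ENTRY1'
+    offset = data.find(payload)
+    tokens = {'required-entry1': '%d:%d' % (offset, len(payload))}
+    self._verify_entries(zip_file, tokens, entries)
+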
 
 if __name__ == '__main__':
   testsuite = unittest.TestLoader().discover(