Merge "Use rules to create partition compatibility symlinks"
diff --git a/core/Makefile b/core/Makefile
index 432b363..73f2f7a 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -584,6 +584,10 @@
 # #################################################################
 ifneq ($(strip $(TARGET_NO_BOOTLOADER)),true)
   INSTALLED_BOOTLOADER_MODULE := $(PRODUCT_OUT)/bootloader
+  ifdef BOARD_PREBUILT_BOOTLOADER
+    $(eval $(call copy-one-file,$(BOARD_PREBUILT_BOOTLOADER),$(INSTALLED_BOOTLOADER_MODULE)))
+    $(call dist-for-goals,dist_files,$(INSTALLED_BOOTLOADER_MODULE))
+  endif # BOARD_PREBUILT_BOOTLOADER
   ifeq ($(strip $(TARGET_BOOTLOADER_IS_2ND)),true)
     INSTALLED_2NDBOOTLOADER_TARGET := $(PRODUCT_OUT)/2ndbootloader
   else
@@ -688,6 +692,14 @@
   BUILT_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
 endif
 
+# $1: boot image target
+# returns the kernel used to make the bootimage
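+# e.g. with BOARD_KERNEL_BINARIES set, $(PRODUCT_OUT)/boot-5.4.img maps to
+# $(PRODUCT_OUT)/kernel-5.4 and boot.img maps to $(PRODUCT_OUT)/kernel;
+# without it, the single $(INSTALLED_KERNEL_TARGET) is returned.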
+define bootimage-to-kernel
+  $(if $(BOARD_KERNEL_BINARIES),\
+    $(PRODUCT_OUT)/$(subst .img,,$(subst boot,kernel,$(notdir $(1)))),\
+    $(INSTALLED_KERNEL_TARGET))
+endef
+
 ifdef BOARD_BOOTIMAGE_PARTITION_SIZE
   BOARD_KERNEL_BOOTIMAGE_PARTITION_SIZE := $(BOARD_BOOTIMAGE_PARTITION_SIZE)
 endif
@@ -700,8 +712,7 @@
 
 ifneq ($(strip $(TARGET_NO_KERNEL)),true)
 INTERNAL_BOOTIMAGE_ARGS := \
-	$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET)) \
-	--kernel $(INSTALLED_KERNEL_TARGET)
+	$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET))
 
 ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
 INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
@@ -754,69 +765,78 @@
 
 else ifeq (true,$(BOARD_AVB_ENABLE)) # TARGET_BOOTIMAGE_USE_EXT2 != true
 
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH)
+# $1: boot image target
+define build_boot_board_avb_enabled
+  $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
+  $(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE)))
+  $(AVBTOOL) add_hash_footer \
+          --image $(1) \
+          --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
+          --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
+          $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
+endef
+
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(AVBTOOL) $(INTERNAL_BOOTIMAGE_FILES) $(BOARD_AVB_BOOT_KEY_PATH) $(call bootimage-to-kernel,$@)
 	$(call pretty,"Target boot image: $@")
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
-	$(hide) $(call assert-max-image-size,$@,$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE)))
-	$(hide) $(AVBTOOL) add_hash_footer \
-	  --image $@ \
-	  --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
-	  --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
-	  $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
+	$(call build_boot_board_avb_enabled,$@)
 
 .PHONY: bootimage-nodeps
 bootimage-nodeps: $(MKBOOTIMG) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
 	@echo "make $@: ignoring dependencies"
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
-	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(call get-hash-image-max-size,$(BOARD_BOOTIMAGE_PARTITION_SIZE)))
-	$(hide) $(AVBTOOL) add_hash_footer \
-	  --image $(INSTALLED_BOOTIMAGE_TARGET) \
-	  --partition_size $(BOARD_BOOTIMAGE_PARTITION_SIZE) \
-	  --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
-	  $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
+	$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_board_avb_enabled,$(b)))
 
 else ifeq (true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)) # BOARD_AVB_ENABLE != true
 
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER)
+# $1: boot image target
+define build_boot_supports_boot_signer
+  $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
+  $(BOOT_SIGNER) /boot $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1)
+  $(call assert-max-image-size,$(1),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+endef
+
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER) $(call bootimage-to-kernel,$@)
 	$(call pretty,"Target boot image: $@")
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
-	$(BOOT_SIGNER) /boot $@ $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $@
-	$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+	$(call build_boot_supports_boot_signer,$@)
 
 .PHONY: bootimage-nodeps
 bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER)
 	@echo "make $@: ignoring dependencies"
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
-	$(BOOT_SIGNER) /boot $(INSTALLED_BOOTIMAGE_TARGET) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(INSTALLED_BOOTIMAGE_TARGET)
-	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+	$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_supports_boot_signer,$(b)))
 
 else ifeq (true,$(PRODUCT_SUPPORTS_VBOOT)) # PRODUCT_SUPPORTS_BOOT_SIGNER != true
 
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(VBOOT_SIGNER) $(FUTILITY)
+# $1: boot image target
+define build_boot_supports_vboot
+  $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1).unsigned
+  $(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1)
+  $(call assert-max-image-size,$(1),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+endef
+
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(VBOOT_SIGNER) $(FUTILITY) $(call bootimage-to-kernel,$@)
 	$(call pretty,"Target boot image: $@")
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@.unsigned
-	$(VBOOT_SIGNER) $(FUTILITY) $@.unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $@.keyblock $@
-	$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+	$(call build_boot_supports_vboot,$@)
 
 .PHONY: bootimage-nodeps
 bootimage-nodeps: $(MKBOOTIMG) $(VBOOT_SIGNER) $(FUTILITY)
 	@echo "make $@: ignoring dependencies"
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET).unsigned
-	$(VBOOT_SIGNER) $(FUTILITY) $(INSTALLED_BOOTIMAGE_TARGET).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(INSTALLED_BOOTIMAGE_TARGET).keyblock $(INSTALLED_BOOTIMAGE_TARGET)
-	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+	$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_supports_vboot,$(b)))
 
 else # PRODUCT_SUPPORTS_VBOOT != true
 
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES)
+# $1: boot image target
+define build_boot_novboot
+  $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
+  $(call assert-max-image-size,$1,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+endef
+
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(call bootimage-to-kernel,$@)
 	$(call pretty,"Target boot image: $@")
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
-	$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+	$(call build_boot_novboot,$@)
 
 .PHONY: bootimage-nodeps
 bootimage-nodeps: $(MKBOOTIMG)
 	@echo "make $@: ignoring dependencies"
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
-	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
+	$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_novboot,$(b)))
 
 endif # TARGET_BOOTIMAGE_USE_EXT2
 endif # BUILDING_BOOT_IMAGE
@@ -1582,7 +1602,9 @@
 # SELinux files
 IGNORE_RECOVERY_SEPOLICY := $(patsubst $(TARGET_RECOVERY_OUT)/%,--exclude=/%,$(recovery_sepolicy))
 
-recovery_kernel := $(INSTALLED_KERNEL_TARGET) # same as a non-recovery system
+# If building multiple boot images from multiple kernels, use the first kernel
+# listed for the recovery image.
+recovery_kernel := $(firstword $(INSTALLED_KERNEL_TARGET))
 recovery_ramdisk := $(PRODUCT_OUT)/ramdisk-recovery.img
 recovery_resources_common := bootable/recovery/res
 
@@ -1951,7 +1973,7 @@
 .PHONY: recoveryimage-nodeps
 recoveryimage-nodeps:
 	@echo "make $@: ignoring dependencies"
-	$(call build-recoveryimage-target, $(INSTALLED_RECOVERYIMAGE_TARGET))
+	$(call build-recoveryimage-target, $(INSTALLED_RECOVERYIMAGE_TARGET), $(recovery_kernel))
 
 else # BUILDING_RECOVERY_IMAGE
 RECOVERY_RESOURCE_ZIP :=
@@ -2394,11 +2416,11 @@
 $(RECOVERY_FROM_BOOT_PATCH): PRIVATE_DIFF_TOOL := $(diff_tool)
 $(RECOVERY_FROM_BOOT_PATCH): \
 	    $(INSTALLED_RECOVERYIMAGE_TARGET) \
-	    $(INSTALLED_BOOTIMAGE_TARGET) \
+	    $(firstword $(INSTALLED_BOOTIMAGE_TARGET)) \
 	    $(diff_tool)
 	@echo "Construct recovery from boot"
 	mkdir -p $(dir $@)
-	$(PRIVATE_DIFF_TOOL) $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_RECOVERYIMAGE_TARGET) $@
+	$(PRIVATE_DIFF_TOOL) $(firstword $(INSTALLED_BOOTIMAGE_TARGET)) $(INSTALLED_RECOVERYIMAGE_TARGET) $@
 else # $(BOARD_USES_FULL_RECOVERY_IMAGE) == true
 RECOVERY_FROM_BOOT_PATCH := $(INSTALLED_RECOVERYIMAGE_TARGET)
 endif # BOARD_USES_FULL_RECOVERY_IMAGE
@@ -4091,6 +4113,9 @@
 ifdef DEVICE_MANIFEST_FILE
 	$(hide) echo "vintf_include_empty_vendor_sku=true" >> $@
 endif
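+# Consumed by img_from_target_files to decide whether the prebuilt bootloader
+# image is copied into the generated img package.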
+ifeq ($(BOARD_BOOTLOADER_IN_UPDATE_PACKAGE),true)
+	$(hide) echo "bootloader_in_update_package=true" >> $@
+endif
 
 .PHONY: misc_info
 misc_info: $(INSTALLED_MISC_INFO_TARGET)
@@ -4247,6 +4272,11 @@
 $(BUILT_TARGET_FILES_PACKAGE): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
 endif
 
+ifdef BOARD_PREBUILT_BOOTLOADER
+$(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTLOADER_MODULE)
+droidcore: $(INSTALLED_BOOTLOADER_MODULE)
+endif
+
 # Depending on the various images guarantees that the underlying
 # directories are up-to-date.
 $(BUILT_TARGET_FILES_PACKAGE): \
@@ -4302,7 +4332,11 @@
 	$(hide) $(call package_files-copy-root, \
 	    $(TARGET_RECOVERY_ROOT_OUT),$(zip_root)/$(PRIVATE_RECOVERY_OUT)/RAMDISK)
 ifdef INSTALLED_KERNEL_TARGET
+ifneq (,$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
 	cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/
+else # BOARD_USES_RECOVERY_AS_BOOT not true
+	cp $(firstword $(INSTALLED_KERNEL_TARGET)) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/kernel
+endif
 endif
 ifeq (truetrue,$(strip $(BUILDING_VENDOR_BOOT_IMAGE))$(strip $(AB_OTA_UPDATER)))
 	echo "$(GENERIC_KERNEL_CMDLINE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/cmdline
@@ -4332,7 +4366,7 @@
 ifdef BOARD_KERNEL_PAGESIZE
 	echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/pagesize
 endif
-endif # INSTALLED_VENDOR_BOOTIMAGE_TARGET not defined
+endif # not (BUILDING_VENDOR_BOOT_IMAGE and AB_OTA_UPDATER)
 endif # INSTALLED_RECOVERYIMAGE_TARGET defined or BOARD_USES_RECOVERY_AS_BOOT is true
 	@# Components of the boot image
 	$(hide) mkdir -p $(zip_root)/BOOT
@@ -4346,7 +4380,7 @@
 	    $(TARGET_RAMDISK_OUT),$(zip_root)/BOOT/RAMDISK)
 endif
 ifdef INSTALLED_KERNEL_TARGET
-	$(hide) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/kernel
+	$(hide) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/
 endif
 ifndef INSTALLED_VENDOR_BOOTIMAGE_TARGET
 ifdef INSTALLED_2NDBOOTLOADER_TARGET
@@ -4522,14 +4556,22 @@
 	$(hide) mkdir -p $(zip_root)/IMAGES
 	$(hide) cp $(INSTALLED_ODMIMAGE_TARGET) $(zip_root)/IMAGES/
 endif
-ifdef BOARD_PREBUILT_VENDOR_DLKIMMAGE
+ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
 	$(hide) mkdir -p $(zip_root)/IMAGES
-	$(hide) cp $(INSTALLED_VENDOR_DLKIMMAGE_TARGET) $(zip_root)/IMAGES/
+	$(hide) cp $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) $(zip_root)/IMAGES/
+endif
+ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_ODM_DLKMIMAGE_TARGET) $(zip_root)/IMAGES/
 endif
 ifdef BOARD_PREBUILT_DTBOIMAGE
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
 endif # BOARD_PREBUILT_DTBOIMAGE
+ifdef BOARD_PREBUILT_BOOTLOADER
+	$(hide) mkdir -p $(zip_root)/IMAGES
+	$(hide) cp $(INSTALLED_BOOTLOADER_MODULE) $(zip_root)/IMAGES/
+endif
 ifneq ($(strip $(BOARD_CUSTOMIMAGES_PARTITION_LIST)),)
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) $(foreach partition,$(BOARD_CUSTOMIMAGES_PARTITION_LIST), \
@@ -4839,6 +4881,31 @@
 	    sed -e 's/\(.*\)\/proguard_dictionary/\0\n\1\/classes.jar/' > $(PRIVATE_LIST_FILE)
 	$(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(OUT_DIR)/.. -l $(PRIVATE_LIST_FILE)
 
+#------------------------------------------------------------------
+# A zip of Proguard usage files.
+#
+PROGUARD_USAGE_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-usage-$(FILE_NAME_TAG).zip
+# For apps_only build we'll establish the dependency later in build/make/core/main.mk.
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+$(PROGUARD_USAGE_ZIP): \
+    $(INSTALLED_SYSTEMIMAGE_TARGET) \
+    $(INSTALLED_RAMDISK_TARGET) \
+    $(INSTALLED_BOOTIMAGE_TARGET) \
+    $(INSTALLED_USERDATAIMAGE_TARGET) \
+    $(INSTALLED_VENDORIMAGE_TARGET) \
+    $(INSTALLED_PRODUCTIMAGE_TARGET) \
+    $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
+    $(INSTALLED_ODMIMAGE_TARGET) \
+    $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
+    $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
+    $(updater_dep)
+endif
+$(PROGUARD_USAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard_usage)/filelist
+$(PROGUARD_USAGE_ZIP): $(MERGE_ZIPS)
+	@echo "Packaging Proguard usage files."
+	mkdir -p $(dir $@) $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS $(dir $(PRIVATE_LIST_FILE))
+	find $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS -name proguard_usage.zip > $(PRIVATE_LIST_FILE)
+	$(MERGE_ZIPS) $@ @$(PRIVATE_LIST_FILE)
 
 ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
 
diff --git a/core/base_rules.mk b/core/base_rules.mk
index ddf736b..cb6cadc 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -115,6 +115,7 @@
 
 include $(BUILD_SYSTEM)/local_vndk.mk
 include $(BUILD_SYSTEM)/local_systemsdk.mk
+include $(BUILD_SYSTEM)/local_current_sdk.mk
 
 my_module_tags := $(LOCAL_MODULE_TAGS)
 ifeq ($(my_host_cross),true)
@@ -515,7 +516,11 @@
 $(LOCAL_INSTALLED_MODULE): PRIVATE_POST_INSTALL_CMD := $(LOCAL_POST_INSTALL_CMD)
 $(LOCAL_INSTALLED_MODULE): $(LOCAL_BUILT_MODULE)
 	@echo "Install: $@"
+ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+	$(copy-file-or-link-to-new-target)
+else
 	$(copy-file-to-new-target)
+endif
 	$(PRIVATE_POST_INSTALL_CMD)
 endif
 
@@ -757,6 +762,13 @@
         $(test_config):$(dir)/$(LOCAL_MODULE).config)))
   endif
 
+  ifneq (,$(LOCAL_EXTRA_FULL_TEST_CONFIGS))
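+    # Install every config listed in LOCAL_EXTRA_FULL_TEST_CONFIGS next to the
+    # module in each of its compatibility suite directories.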
+    $(foreach test_config_file, $(LOCAL_EXTRA_FULL_TEST_CONFIGS), \
+      $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
+        $(eval my_compat_dist_config_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
+          $(test_config_file):$(dir)/$(notdir $(test_config_file))))))
+  endif
+
   ifneq (,$(wildcard $(LOCAL_PATH)/DynamicConfig.xml))
     $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
       $(eval my_compat_dist_config_$(suite) += $(foreach dir, $(call compatibility_suite_dirs,$(suite)), \
@@ -973,6 +985,7 @@
 ALL_MODULES.$(my_register_name).MODULE_NAME := $(LOCAL_MODULE)
 ALL_MODULES.$(my_register_name).COMPATIBILITY_SUITES := $(LOCAL_COMPATIBILITY_SUITE)
 ALL_MODULES.$(my_register_name).TEST_CONFIG := $(test_config)
+ALL_MODULES.$(my_register_name).EXTRA_TEST_CONFIGS := $(LOCAL_EXTRA_FULL_TEST_CONFIGS)
 test_config :=
 
 INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
diff --git a/core/board_config.mk b/core/board_config.mk
index b7d0178..95d8af8 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -20,6 +20,7 @@
 # ###############################################################
 
 _board_strip_readonly_list := \
+  BOARD_BOOTLOADER_IN_UPDATE_PACKAGE \
   BOARD_EGL_CFG \
   BOARD_HAVE_BLUETOOTH \
   BOARD_INSTALLER_CMDLINE \
@@ -289,7 +290,7 @@
 
 ###########################################
 # Now we can substitute with the real value of TARGET_COPY_OUT_DEBUG_RAMDISK
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ifneq (,$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT) $(BOARD_GKI_NONAB_COMPAT)))
 TARGET_COPY_OUT_DEBUG_RAMDISK := debug_ramdisk/first_stage_ramdisk
 TARGET_COPY_OUT_VENDOR_DEBUG_RAMDISK := vendor_debug_ramdisk/first_stage_ramdisk
 TARGET_COPY_OUT_TEST_HARNESS_RAMDISK := test_harness_ramdisk/first_stage_ramdisk
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 307c2c2..9c29974 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -98,6 +98,7 @@
 LOCAL_EXPORT_SDK_LIBRARIES:=
 LOCAL_EXPORT_SHARED_LIBRARY_HEADERS:=
 LOCAL_EXPORT_STATIC_LIBRARY_HEADERS:=
+LOCAL_EXTRA_FULL_TEST_CONFIGS:=
 LOCAL_EXTRACT_APK:=
 LOCAL_EXTRACT_DPI_APK:=
 LOCAL_FDO_SUPPORT:=
@@ -275,6 +276,7 @@
 LOCAL_SOONG_LINK_TYPE :=
 LOCAL_SOONG_LINT_REPORTS :=
 LOCAL_SOONG_PROGUARD_DICT :=
+LOCAL_SOONG_PROGUARD_USAGE_ZIP :=
 LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
 LOCAL_SOONG_DEVICE_RRO_DIRS :=
 LOCAL_SOONG_PRODUCT_RRO_DIRS :=
diff --git a/core/config.mk b/core/config.mk
index 57296d8..a286769 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -720,6 +720,16 @@
   endif
 endif
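+
+# Vendor, ODM and proprietary modules build against
+# BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES instead of "current" (see
+# local_current_sdk.mk). Default it to "current", and require any numeric value
+# to be at least PRODUCT_SHIPPING_API_LEVEL.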
 
+ifndef BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
+  BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES := current
+else
+  ifdef PRODUCT_SHIPPING_API_LEVEL
+    ifneq ($(call math_lt,$(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES),$(PRODUCT_SHIPPING_API_LEVEL)),)
+      $(error BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES ($(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)) must be greater than or equal to PRODUCT_SHIPPING_API_LEVEL ($(PRODUCT_SHIPPING_API_LEVEL)))
+    endif
+  endif
+endif
+.KATI_READONLY := BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
 
 ifdef PRODUCT_SHIPPING_API_LEVEL
   ifneq ($(call numbers_less_than,$(PRODUCT_SHIPPING_API_LEVEL),$(BOARD_SYSTEMSDK_VERSIONS)),)
diff --git a/core/definitions.mk b/core/definitions.mk
index 2bf1ba6..ace3ff8 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -110,6 +110,9 @@
 # All compatibility suites mentioned in LOCAL_COMPATIBILITY_SUITES
 ALL_COMPATIBILITY_SUITES :=
 
+# All compatibility suite files to dist.
+ALL_COMPATIBILITY_DIST_FILES :=
+
 # All LINK_TYPE entries
 ALL_LINK_TYPES :=
 
@@ -2551,6 +2554,18 @@
 $(hide) cp $< $@
 endef
 
+# The same as copy-file-to-new-target, but preserves symlinks. Symlinks are
+# converted to absolute paths so they do not break when copied.
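+# If $< is a symlink, $@ becomes a symlink to the fully resolved path of $<
+# (realpath); regular files are copied as before.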
+define copy-file-or-link-to-new-target
+@mkdir -p $(dir $@)
+$(hide) rm -f $@
+$(hide) if [ -h $< ]; then \
+  ln -s $$(realpath $<) $@; \
+else \
+  cp $< $@; \
+fi
+endef
+
 # Copy a prebuilt file to a target location.
 define transform-prebuilt-to-target
 @echo "$($(PRIVATE_PREFIX)DISPLAY) Prebuilt: $(PRIVATE_MODULE) ($@)"
@@ -2563,6 +2578,13 @@
 $(copy-file-to-target-strip-comments)
 endef
 
+# Copy a prebuilt file to a target location, but preserve symlinks rather than
+# dereference them.
+define copy-or-link-prebuilt-to-target
+@echo "$($(PRIVATE_PREFIX)DISPLAY) Prebuilt: $(PRIVATE_MODULE) ($@)"
+$(copy-file-or-link-to-new-target)
+endef
+
 # Copy a list of files/directories to target location, with sub dir structure preserved.
 # For example $(HOST_OUT_EXECUTABLES)/aapt -> $(staging)/bin/aapt .
 # $(1): the source list of files/directories.
@@ -2812,6 +2834,7 @@
 # 2. Add all the files to each suite's dependent files list.
 # 3. Do the dependency addition to my_all_targets.
 # 4. Save the module name to COMPATIBILITY.$(suite).MODULES for each suite.
+# 5. Collect files to dist to ALL_COMPATIBILITY_DIST_FILES.
 # Requires for each suite: use my_compat_dist_config_$(suite) to define the test config.
 #    and use my_compat_dist_$(suite) to define the others.
 define create-suite-dependencies
@@ -2824,9 +2847,11 @@
     $$(foreach f,$$(my_compat_dist_$(suite)),$$(call word-colon,2,$$(f))) \
     $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call word-colon,2,$$(f))) \
     $$(my_compat_dist_test_data_$(suite))) \
+  $(eval ALL_COMPATIBILITY_DIST_FILES += $$(my_compat_dist_$(suite))) \
   $(eval COMPATIBILITY.$(suite).MODULES += $$(my_register_name))) \
-$(eval $(my_all_targets) : $(call copy-many-files, \
-  $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_$(suite))))) \
+$(eval $(my_all_targets) : \
+  $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE), \
+    $(foreach f,$(my_compat_dist_$(suite)), $(call word-colon,2,$(f))))) \
   $(call copy-many-xml-files-checked, \
     $(sort $(foreach suite,$(LOCAL_COMPATIBILITY_SUITE),$(my_compat_dist_config_$(suite))))))
 endef
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 167fed9..76e7dd3 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -328,7 +328,7 @@
 HOST_OUT := $(HOST_OUT_ROOT)/$(HOST_OS)-$(HOST_PREBUILT_ARCH)
 SOONG_HOST_OUT := $(SOONG_OUT_DIR)/host/$(HOST_OS)-$(HOST_PREBUILT_ARCH)
 
-HOST_CROSS_OUT := $(HOST_OUT_ROOT)/windows-$(HOST_PREBUILT_ARCH)
+HOST_CROSS_OUT := $(HOST_OUT_ROOT)/$(HOST_CROSS_OS)-$(HOST_CROSS_ARCH)
 
 .KATI_READONLY := HOST_OUT SOONG_HOST_OUT HOST_CROSS_OUT
 
diff --git a/core/local_current_sdk.mk b/core/local_current_sdk.mk
new file mode 100644
index 0000000..ea7da8a
--- /dev/null
+++ b/core/local_current_sdk.mk
@@ -0,0 +1,26 @@
+#
+# Copyright (C) 2020 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
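+# For vendor, ODM and proprietary modules, replace a LOCAL_SDK_VERSION of
+# "current" or "system_current" with the numeric API level configured in
+# BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES.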
+ifdef BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES
+  ifneq (current,$(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
+    ifneq (,$(filter true,$(LOCAL_VENDOR_MODULE) $(LOCAL_ODM_MODULE) $(LOCAL_PROPRIETARY_MODULE)))
+      ifeq (current,$(LOCAL_SDK_VERSION))
+        LOCAL_SDK_VERSION := $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)
+      else ifeq (system_current,$(LOCAL_SDK_VERSION))
+        LOCAL_SDK_VERSION := system_$(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES)
+      endif
+    endif
+  endif
+endif
diff --git a/core/main.mk b/core/main.mk
index 776acf1..ebee25a 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -1406,6 +1406,10 @@
 test_files :=
 endif
 
+# Deduplicate compatibility suite dist files across modules and packages before
+# copying them to their requested locations. Assign the result to an unused
+# variable to prevent Make from trying to make sense of it.
+_unused := $(call copy-many-files, $(sort $(ALL_COMPATIBILITY_DIST_FILES)))
 
 # Don't include any GNU General Public License shared objects or static
 # libraries in SDK images.  GPL executables (not static/dynamic libraries)
@@ -1678,6 +1682,9 @@
   $(PROGUARD_DICT_ZIP) : $(apps_only_installed_files)
   $(call dist-for-goals,apps_only, $(PROGUARD_DICT_ZIP))
 
+  $(PROGUARD_USAGE_ZIP) : $(apps_only_installed_files)
+  $(call dist-for-goals,apps_only, $(PROGUARD_USAGE_ZIP))
+
   $(SYMBOLS_ZIP) : $(apps_only_installed_files)
   $(call dist-for-goals,apps_only, $(SYMBOLS_ZIP))
 
@@ -1706,6 +1713,7 @@
     $(BUILT_OTATOOLS_PACKAGE) \
     $(SYMBOLS_ZIP) \
     $(PROGUARD_DICT_ZIP) \
+    $(PROGUARD_USAGE_ZIP) \
     $(COVERAGE_ZIP) \
     $(APPCOMPAT_ZIP) \
     $(INSTALLED_FILES_FILE) \
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index b994b17..50ac93a 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -7,6 +7,7 @@
 # LOCAL_SOONG_HEADER_JAR
 # LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
 # LOCAL_SOONG_PROGUARD_DICT
+# LOCAL_SOONG_PROGUARD_USAGE_ZIP
 # LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
 # LOCAL_SOONG_RRO_DIRS
 # LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
@@ -85,6 +86,13 @@
     $(intermediates.COMMON)/proguard_dictionary)
 endif
 
+ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
+  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_USAGE_ZIP),\
+    $(intermediates.COMMON)/proguard_usage.zip))
+  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+    $(intermediates.COMMON)/proguard_usage.zip)
+endif
+
 ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
 resource_export_package := $(intermediates.COMMON)/package-export.apk
 resource_export_stamp := $(intermediates.COMMON)/src/R.stamp
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
index c9b742a..a0315a5 100644
--- a/core/soong_cc_prebuilt.mk
+++ b/core/soong_cc_prebuilt.mk
@@ -142,8 +142,16 @@
   $(LOCAL_BUILT_MODULE): $(same_vndk_variants_stamp)
 endif
 
+# Use copy-or-link-prebuilt-to-target for host executables and shared libraries,
+# to preserve symlinks to the source trees. They can then run directly from the
+# prebuilt directories where the linker can load their dependencies using
+# relative RUNPATHs.
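+# The condition below is "true true" only when this is a host module and its
+# class is EXECUTABLES, SHARED_LIBRARIES or NATIVE_TESTS.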
 $(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
+ifeq ($(LOCAL_IS_HOST_MODULE) $(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),true,),true true)
+	$(copy-or-link-prebuilt-to-target)
+else
 	$(transform-prebuilt-to-target)
+endif
 ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
 	$(hide) chmod +x $@
 endif
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 4731250..ad2e816 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -210,6 +210,7 @@
 $(call end_json_map)
 
 $(call add_json_bool, EnforceProductPartitionInterface,  $(PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE))
+$(call add_json_str,  DeviceCurrentApiLevelForVendorModules,  $(BOARD_CURRENT_API_LEVEL_FOR_VENDOR_MODULES))
 
 $(call add_json_bool, InstallExtraFlattenedApexes, $(PRODUCT_INSTALL_EXTRA_FLATTENED_APEXES))
 
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 05f700d..5444d96 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -58,6 +58,14 @@
     $(intermediates.COMMON)/proguard_dictionary)
 endif
 
+ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
+  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_USAGE_ZIP),\
+    $(intermediates.COMMON)/proguard_usage.zip))
+  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+    $(intermediates.COMMON)/proguard_usage.zip)
+endif
+
+
 ifdef LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE
   my_res_package := $(intermediates.COMMON)/package-res.apk
 
diff --git a/core/soong_rust_prebuilt.mk b/core/soong_rust_prebuilt.mk
index 804e37e..de6bafd 100644
--- a/core/soong_rust_prebuilt.mk
+++ b/core/soong_rust_prebuilt.mk
@@ -57,7 +57,11 @@
 endif
 
 $(LOCAL_BUILT_MODULE): $(LOCAL_PREBUILT_MODULE_FILE)
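+# As in soong_cc_prebuilt.mk, preserve symlinks when installing host
+# executables and shared libraries.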
+ifeq ($(LOCAL_IS_HOST_MODULE) $(if $(filter EXECUTABLES SHARED_LIBRARIES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),true,),true true)
+	$(copy-or-link-prebuilt-to-target)
+else
 	$(transform-prebuilt-to-target)
+endif
 ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
 	$(hide) chmod +x $@
 endif
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index f6cec15..d9aebed 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -14,7 +14,7 @@
 			'"compatibility_suites": [$(foreach w,$(sort $(ALL_MODULES.$(m).COMPATIBILITY_SUITES)),"$(w)", )], ' \
 			'"auto_test_config": [$(ALL_MODULES.$(m).auto_test_config)], ' \
 			'"module_name": "$(ALL_MODULES.$(m).MODULE_NAME)", ' \
-			'"test_config": [$(if $(ALL_MODULES.$(m).TEST_CONFIG),"$(ALL_MODULES.$(m).TEST_CONFIG)")], ' \
+			'"test_config": [$(foreach w,$(strip $(ALL_MODULES.$(m).TEST_CONFIG) $(ALL_MODULES.$(m).EXTRA_TEST_CONFIGS)),"$(w)", )], ' \
 			'"dependencies": [$(foreach w,$(sort $(ALL_DEPS.$(m).ALL_DEPS)),"$(w)", )], ' \
 			'"srcs": [$(foreach w,$(sort $(ALL_MODULES.$(m).SRCS)),"$(w)", )], ' \
 			'"srcjars": [$(foreach w,$(sort $(ALL_MODULES.$(m).SRCJARS)),"$(w)", )], ' \
diff --git a/core/tasks/platform_availability_check.mk b/core/tasks/platform_availability_check.mk
index 043d130..f252ff5 100644
--- a/core/tasks/platform_availability_check.mk
+++ b/core/tasks/platform_availability_check.mk
@@ -26,11 +26,31 @@
       $(if $(filter true,$(ALL_MODULES.$(m).NOT_AVAILABLE_FOR_PLATFORM)),\
         $(m))))))
 
-_violators_with_path := $(foreach m,$(sort $(_modules_not_available_for_platform)),\
+ifndef ALLOW_MISSING_DEPENDENCIES
+  _violators_with_path := $(foreach m,$(sort $(_modules_not_available_for_platform)),\
     $(m):$(word 1,$(ALL_MODULES.$(m).PATH))\
-)
+  )
 
-$(call maybe-print-list-and-error,$(_violators_with_path),\
+  $(call maybe-print-list-and-error,$(_violators_with_path),\
 Following modules are requested to be installed. But are not available \
 for platform because they do not have "//apex_available:platform" or \
 they depend on other modules that are not available for platform)
+
+else
+
+# Don't error out immediately when ALLOW_MISSING_DEPENDENCIES is set.
+# Instead, add a dependency on a rule that prints the error message.
+  define not_available_for_platform_rule
+    not_installable_file := $(patsubst $(OUT_DIR)/%,$(OUT_DIR)/NOT_AVAILABLE_FOR_PLATFORM/%,$(1))
+    $(1): $$(not_installable_file)
+    $$(not_installable_file):
+	$(call echo-error,$(2),Module is requested to be installed but is not \
+available for platform because it does not have "//apex_available:platform" or \
+it depends on other modules that are not available for platform.)
+	exit 1
+  endef
+
+  $(foreach m,$(_modules_not_available_for_platform),\
+    $(foreach i,$(ALL_MODULES.$(m).INSTALLED),\
+      $(eval $(call not_available_for_platform_rule,$(i),$(m)))))
+endif
diff --git a/rbesetup.sh b/rbesetup.sh
index adcf081..724ad7d 100644
--- a/rbesetup.sh
+++ b/rbesetup.sh
@@ -36,6 +36,7 @@
 # This function sets RBE specific environment variables needed for the build to
 # be executed by RBE. This file should be sourced once per checkout of Android code.
 function _set_rbe_vars() {
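+  # Drop any inherited USE_GOMA setting so it cannot conflict with the RBE
+  # configuration exported below.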
+  unset USE_GOMA
   export USE_RBE="true"
   export RBE_CXX_EXEC_STRATEGY="remote_local_fallback"
   export RBE_JAVAC=1
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 2f41540..c20c782 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -70,6 +70,7 @@
 
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
 
+BOARD_RAMDISK_USE_LZ4 := true
 BOARD_BOOT_HEADER_VERSION := 3
 BOARD_MKBOOTIMG_ARGS += --header_version $(BOARD_BOOT_HEADER_VERSION)
 
@@ -78,6 +79,7 @@
 ifneq (,$(filter userdebug eng,$(TARGET_BUILD_VARIANT)))
 BOARD_KERNEL_BINARIES += kernel-5.4-allsyms kernel-5.4-gz-allsyms kernel-5.4-lz4-allsyms
 endif
+BOARD_KERNEL_MODULE_INTERFACE_VERSIONS := 5.4-android12-0
 
 # Some vendors still haven't cleaned up all device specific directories under
 # root!
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index f6770fb..716f00f 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -352,6 +352,8 @@
 
 PRODUCT_SYSTEM_PROPERTIES += debug.atrace.tags.enableflags=0
 
+PRODUCT_PROPERTY_OVERRIDES += ro.gfx.angle.supported=true
+
 # Packages included only for eng or userdebug builds, previously debug tagged
 PRODUCT_PACKAGES_DEBUG := \
     adb_keys \
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index ace00ac..7f727fb 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -49,6 +49,7 @@
     required: [
         "blk_alloc_to_base_fs",
         "e2fsck",
+        "mkuserimg_mke2fs",
         "simg2img",
         "tune2fs",
     ],
@@ -88,16 +89,35 @@
     ],
 }
 
+python_library_host {
+    name: "ota_metadata_proto",
+    version: {
+        py2: {
+            enabled: true,
+        },
+        py3: {
+            enabled: true,
+        },
+    },
+    srcs: [
+       "ota_metadata.proto",
+    ],
+    proto: {
+        canonical_path_from_root: false,
+    },
+}
+
 python_defaults {
     name: "releasetools_ota_from_target_files_defaults",
     srcs: [
         "edify_generator.py",
-        "ota_from_target_files.py",
         "non_ab_ota.py",
-        "target_files_diff.py",
+        "ota_from_target_files.py",
         "ota_utils.py",
+        "target_files_diff.py",
     ],
     libs: [
+        "ota_metadata_proto",
         "releasetools_check_target_files_vintf",
         "releasetools_common",
         "releasetools_verity_utils",
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 3d24310..8783f25 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -51,6 +51,8 @@
     self.apex_path = apex_path
     self.key_passwords = key_passwords
     self.codename_to_api_level_map = codename_to_api_level_map
+    self.debugfs_path = os.path.join(
+        OPTIONS.search_path, "bin", "debugfs_static")
 
   def ProcessApexFile(self, apk_keys, payload_key, signing_args=None):
     """Scans and signs the apk files and repack the apex
@@ -61,14 +63,13 @@
     Returns:
       The repacked apex file containing the signed apk files.
     """
-    debugfs_path = os.path.join(OPTIONS.search_path, "bin", "debugfs_static")
-    if not os.path.exists(debugfs_path):
+    if not os.path.exists(self.debugfs_path):
       raise ApexSigningError(
           "Couldn't find location of debugfs_static: " +
           "Path {} does not exist. ".format(debugfs_path) +
           "Make sure bin/debugfs_static can be found in -p <path>")
     list_cmd = ['deapexer', '--debugfs_path',
-                debugfs_path, 'list', self.apex_path]
+                self.debugfs_path, 'list', self.apex_path]
     entries_names = common.RunAndCheckOutput(list_cmd).split()
     apk_entries = [name for name in entries_names if name.endswith('.apk')]
 
@@ -98,8 +99,14 @@
 
   def ExtractApexPayloadAndSignApks(self, apk_entries, apk_keys):
     """Extracts the payload image and signs the containing apk files."""
+    if not os.path.exists(self.debugfs_path):
+      raise ApexSigningError(
+          "Couldn't find location of debugfs_static: " +
+          "Path {} does not exist. ".format(debugfs_path) +
+          "Make sure bin/debugfs_static can be found in -p <path>")
     payload_dir = common.MakeTempDir()
-    extract_cmd = ['deapexer', 'extract', self.apex_path, payload_dir]
+    extract_cmd = ['deapexer', '--debugfs_path',
+                   self.debugfs_path, 'extract', self.apex_path, payload_dir]
     common.RunAndCheckOutput(extract_cmd)
 
     has_signed_apk = False
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index ab38d0d..5409194 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -58,6 +58,7 @@
 OPTIONS.additional_entries = []
 OPTIONS.bootable_only = False
 OPTIONS.put_super = None
+OPTIONS.put_bootloader = None
 OPTIONS.dynamic_partition_list = None
 OPTIONS.super_device_list = None
 OPTIONS.retrofit_dap = None
@@ -75,6 +76,7 @@
     info = OPTIONS.info_dict = common.LoadInfoDict(input_zip)
 
   OPTIONS.put_super = info.get('super_image_in_update_package') == 'true'
+  OPTIONS.put_bootloader = info.get('bootloader_in_update_package') == 'true'
   OPTIONS.dynamic_partition_list = info.get('dynamic_partition_list',
                                             '').strip().split()
   OPTIONS.super_device_list = info.get('super_block_devices',
@@ -122,9 +124,11 @@
 
   for image_path in [name for name in namelist if name.startswith('IMAGES/')]:
     image = os.path.basename(image_path)
-    if OPTIONS.bootable_only and image not in ('boot.img', 'recovery.img'):
+    if OPTIONS.bootable_only and image not in ('boot.img', 'recovery.img', 'bootloader'):
       continue
-    if not image.endswith('.img'):
+    if not image.endswith('.img') and image != 'bootloader':
+      continue
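+    # The prebuilt bootloader is stored without a .img suffix; keep it only
+    # when the target_files was built with bootloader_in_update_package set.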
+    if image == 'bootloader' and not OPTIONS.put_bootloader:
       continue
     # Filter out super_empty and the images that are already in super partition.
     if OPTIONS.put_super:
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 3a87957..471ef25 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -276,7 +276,7 @@
 
   script.SetProgress(1)
   script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
-  metadata["ota-required-cache"] = str(script.required_cache)
+  metadata.required_cache = script.required_cache
 
   # We haven't written the metadata entry, which will be done in
   # FinalizeMetadata.
@@ -530,7 +530,7 @@
     script.AddToZip(source_zip, output_zip, input_path=OPTIONS.updater_binary)
   else:
     script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
-  metadata["ota-required-cache"] = str(script.required_cache)
+  metadata.required_cache = script.required_cache
 
   # We haven't written the metadata entry yet, which will be handled in
   # FinalizeMetadata().
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index f42974f..93a3e0e 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -216,6 +216,7 @@
 import zipfile
 
 import common
+import ota_utils
 import target_files_diff
 from check_target_files_vintf import CheckVintfIfTrebleEnabled
 from non_ab_ota import GenerateNonAbOtaPackage
@@ -228,20 +229,16 @@
 
 logger = logging.getLogger(__name__)
 
-OPTIONS = common.OPTIONS
-OPTIONS.package_key = None
-OPTIONS.incremental_source = None
+OPTIONS = ota_utils.OPTIONS
 OPTIONS.verify = False
 OPTIONS.patch_threshold = 0.95
 OPTIONS.wipe_user_data = False
-OPTIONS.downgrade = False
 OPTIONS.extra_script = None
 OPTIONS.worker_threads = multiprocessing.cpu_count() // 2
 if OPTIONS.worker_threads == 0:
   OPTIONS.worker_threads = 1
 OPTIONS.two_step = False
 OPTIONS.include_secondary = False
-OPTIONS.no_signing = False
 OPTIONS.block_based = True
 OPTIONS.updater_binary = None
 OPTIONS.oem_dicts = None
@@ -257,14 +254,9 @@
 OPTIONS.payload_signer_args = []
 OPTIONS.payload_signer_maximum_signature_size = None
 OPTIONS.extracted_input = None
-OPTIONS.key_passwords = []
 OPTIONS.skip_postinstall = False
-OPTIONS.retrofit_dynamic_partitions = False
 OPTIONS.skip_compatibility_check = False
-OPTIONS.output_metadata_path = None
 OPTIONS.disable_fec_computation = False
-OPTIONS.force_non_ab = False
-OPTIONS.boot_variable_file = None
 
 
 POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
@@ -856,7 +848,7 @@
   if OPTIONS.downgrade:
     max_timestamp = source_info.GetBuildProp("ro.build.date.utc")
   else:
-    max_timestamp = metadata["post-timestamp"]
+    max_timestamp = str(metadata.postcondition.timestamp)
   additional_args = ["--max_timestamp", max_timestamp]
 
   payload.Generate(target_file, source_file, additional_args)
diff --git a/tools/releasetools/ota_metadata.proto b/tools/releasetools/ota_metadata.proto
new file mode 100644
index 0000000..20d3091
--- /dev/null
+++ b/tools/releasetools/ota_metadata.proto
@@ -0,0 +1,95 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// If you change this file, please update ota_metadata_pb2.py by executing
+// protoc ota_metadata.proto --python_out $ANDROID_BUILD_TOP/build/tools/releasetools
+
+
+syntax = "proto3";
+
+package build.tools.releasetools;
+option optimize_for = LITE_RUNTIME;
+
+// The build information of a particular partition on the device.
+message PartitionState {
+  string partition_name = 1;
+  repeated string device = 2;
+  repeated string build = 3;
+  // The version string of the partition. It's usually a timestamp if present.
+  // One known exception is the boot image, which uses the KMI version, e.g.
+  // 5.4.42-android12-0
+  string version = 4;
+
+  // TODO(xunchang), revisit other necessary fields, e.g. security_patch_level.
+}
+
+// The build information of the device. The bytes of the running images are thus
+// inferred from the device state. For more information on the meaning of each
+// subfield, see
+// https://source.android.com/compatibility/android-cdd#3_2_2_build_parameters
+message DeviceState {
+  // Device name, i.e. ro.product.device. If the field has multiple values, the
+  // OTA package supports multiple devices; this usually happens when the same
+  // image is used to support multiple SKUs.
+  repeated string device = 1;
+  // Device fingerprint. Up to the R build, the value is read from
+  // ro.build.fingerprint.
+  repeated string build = 2;
+  // A value that specifies the version of the Android build.
+  string build_incremental = 3;
+  // The timestamp when the build is generated.
+  int64 timestamp = 4;
+  // The version of the currently-executing Android system.
+  string sdk_level = 5;
+  // A value indicating the security patch level of a build.
+  string security_patch_level = 6;
+
+  // The detailed state of each partition. For partial updates or devices with
+  // a mixed build of partitions, some of the above fields may be left empty,
+  // and the client will rely on the information of the specific partitions to
+  // target the update.
+  repeated PartitionState partition_state = 7;
+}
+
+// The metadata of an OTA package. It contains the information of the package
+// and the prerequisites to install the update correctly.
+message OtaMetadata {
+  enum OtaType {
+    UNKNOWN = 0;
+    AB = 1;
+    BLOCK = 2;
+    BRICK = 3;
+  };
+  OtaType type = 1;
+  // True if we need to wipe after the update.
+  bool wipe = 2;
+  // True if the timestamp of the post build is older than the pre build.
+  bool downgrade = 3;
+  // A map of name:content of property files, e.g. ota-property-files.
+  map<string, string> property_files = 4;
+
+  // The required device state in order to install the package.
+  DeviceState precondition = 5;
+  // The expected device state after the update.
+  DeviceState postcondition = 6;
+
+  // True if the OTA updates the device to support dynamic partitions while
+  // the source build doesn't.
+  bool retrofit_dynamic_partitions = 7;
+  // The required size of the cache partition, only valid for non-A/B update.
+  int64 required_cache = 8;
+}
diff --git a/tools/releasetools/ota_metadata_pb2.py b/tools/releasetools/ota_metadata_pb2.py
new file mode 100644
index 0000000..ff2b2c5
--- /dev/null
+++ b/tools/releasetools/ota_metadata_pb2.py
@@ -0,0 +1,343 @@
+# -*- coding: utf-8 -*-
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: ota_metadata.proto
+
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='ota_metadata.proto',
+  package='build.tools.releasetools',
+  syntax='proto3',
+  serialized_options=b'H\003',
+  serialized_pb=b'\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"\xe1\x03\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42\x02H\x03\x62\x06proto3'
+)
+
+
+
+_OTAMETADATA_OTATYPE = _descriptor.EnumDescriptor(
+  name='OtaType',
+  full_name='build.tools.releasetools.OtaMetadata.OtaType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='UNKNOWN', index=0, number=0,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='AB', index=1, number=1,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BLOCK', index=2, number=2,
+      serialized_options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='BRICK', index=3, number=3,
+      serialized_options=None,
+      type=None),
+  ],
+  containing_type=None,
+  serialized_options=None,
+  serialized_start=777,
+  serialized_end=829,
+)
+_sym_db.RegisterEnumDescriptor(_OTAMETADATA_OTATYPE)
+
+
+_PARTITIONSTATE = _descriptor.Descriptor(
+  name='PartitionState',
+  full_name='build.tools.releasetools.PartitionState',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='partition_name', full_name='build.tools.releasetools.PartitionState.partition_name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='device', full_name='build.tools.releasetools.PartitionState.device', index=1,
+      number=2, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='build', full_name='build.tools.releasetools.PartitionState.build', index=2,
+      number=3, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='version', full_name='build.tools.releasetools.PartitionState.version', index=3,
+      number=4, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=48,
+  serialized_end=136,
+)
+
+
+_DEVICESTATE = _descriptor.Descriptor(
+  name='DeviceState',
+  full_name='build.tools.releasetools.DeviceState',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='device', full_name='build.tools.releasetools.DeviceState.device', index=0,
+      number=1, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='build', full_name='build.tools.releasetools.DeviceState.build', index=1,
+      number=2, type=9, cpp_type=9, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='build_incremental', full_name='build.tools.releasetools.DeviceState.build_incremental', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='timestamp', full_name='build.tools.releasetools.DeviceState.timestamp', index=3,
+      number=4, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='sdk_level', full_name='build.tools.releasetools.DeviceState.sdk_level', index=4,
+      number=5, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='security_patch_level', full_name='build.tools.releasetools.DeviceState.security_patch_level', index=5,
+      number=6, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='partition_state', full_name='build.tools.releasetools.DeviceState.partition_state', index=6,
+      number=7, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=139,
+  serialized_end=345,
+)
+
+
+_OTAMETADATA_PROPERTYFILESENTRY = _descriptor.Descriptor(
+  name='PropertyFilesEntry',
+  full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='key', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.key', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=b"".decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=b'8\001',
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=723,
+  serialized_end=775,
+)
+
+_OTAMETADATA = _descriptor.Descriptor(
+  name='OtaMetadata',
+  full_name='build.tools.releasetools.OtaMetadata',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='type', full_name='build.tools.releasetools.OtaMetadata.type', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='wipe', full_name='build.tools.releasetools.OtaMetadata.wipe', index=1,
+      number=2, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='downgrade', full_name='build.tools.releasetools.OtaMetadata.downgrade', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='property_files', full_name='build.tools.releasetools.OtaMetadata.property_files', index=3,
+      number=4, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='precondition', full_name='build.tools.releasetools.OtaMetadata.precondition', index=4,
+      number=5, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='postcondition', full_name='build.tools.releasetools.OtaMetadata.postcondition', index=5,
+      number=6, type=11, cpp_type=10, label=1,
+      has_default_value=False, default_value=None,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='retrofit_dynamic_partitions', full_name='build.tools.releasetools.OtaMetadata.retrofit_dynamic_partitions', index=6,
+      number=7, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='required_cache', full_name='build.tools.releasetools.OtaMetadata.required_cache', index=7,
+      number=8, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[_OTAMETADATA_PROPERTYFILESENTRY, ],
+  enum_types=[
+    _OTAMETADATA_OTATYPE,
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=348,
+  serialized_end=829,
+)
+
+_DEVICESTATE.fields_by_name['partition_state'].message_type = _PARTITIONSTATE
+_OTAMETADATA_PROPERTYFILESENTRY.containing_type = _OTAMETADATA
+_OTAMETADATA.fields_by_name['type'].enum_type = _OTAMETADATA_OTATYPE
+_OTAMETADATA.fields_by_name['property_files'].message_type = _OTAMETADATA_PROPERTYFILESENTRY
+_OTAMETADATA.fields_by_name['precondition'].message_type = _DEVICESTATE
+_OTAMETADATA.fields_by_name['postcondition'].message_type = _DEVICESTATE
+_OTAMETADATA_OTATYPE.containing_type = _OTAMETADATA
+DESCRIPTOR.message_types_by_name['PartitionState'] = _PARTITIONSTATE
+DESCRIPTOR.message_types_by_name['DeviceState'] = _DEVICESTATE
+DESCRIPTOR.message_types_by_name['OtaMetadata'] = _OTAMETADATA
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+PartitionState = _reflection.GeneratedProtocolMessageType('PartitionState', (_message.Message,), {
+  'DESCRIPTOR' : _PARTITIONSTATE,
+  '__module__' : 'ota_metadata_pb2'
+  # @@protoc_insertion_point(class_scope:build.tools.releasetools.PartitionState)
+  })
+_sym_db.RegisterMessage(PartitionState)
+
+DeviceState = _reflection.GeneratedProtocolMessageType('DeviceState', (_message.Message,), {
+  'DESCRIPTOR' : _DEVICESTATE,
+  '__module__' : 'ota_metadata_pb2'
+  # @@protoc_insertion_point(class_scope:build.tools.releasetools.DeviceState)
+  })
+_sym_db.RegisterMessage(DeviceState)
+
+OtaMetadata = _reflection.GeneratedProtocolMessageType('OtaMetadata', (_message.Message,), {
+
+  'PropertyFilesEntry' : _reflection.GeneratedProtocolMessageType('PropertyFilesEntry', (_message.Message,), {
+    'DESCRIPTOR' : _OTAMETADATA_PROPERTYFILESENTRY,
+    '__module__' : 'ota_metadata_pb2'
+    # @@protoc_insertion_point(class_scope:build.tools.releasetools.OtaMetadata.PropertyFilesEntry)
+    })
+  ,
+  'DESCRIPTOR' : _OTAMETADATA,
+  '__module__' : 'ota_metadata_pb2'
+  # @@protoc_insertion_point(class_scope:build.tools.releasetools.OtaMetadata)
+  })
+_sym_db.RegisterMessage(OtaMetadata)
+_sym_db.RegisterMessage(OtaMetadata.PropertyFilesEntry)
+
+
+DESCRIPTOR._options = None
+_OTAMETADATA_PROPERTYFILESENTRY._options = None
+# @@protoc_insertion_point(module_scope)
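For reference, a minimal usage sketch of the generated ota_metadata_pb2 module; the field and enum names follow the descriptors above, but the values here are illustrative and not part of this change.

import ota_metadata_pb2

metadata = ota_metadata_pb2.OtaMetadata()
metadata.type = ota_metadata_pb2.OtaMetadata.AB   # value of the nested OtaType enum
metadata.wipe = False
metadata.required_cache = 0
# property_files is a map<string, string>, backed by the PropertyFilesEntry type.
metadata.property_files['ota-property-files'] = 'payload.bin:123:456'

# SerializeToString() returns bytes; round-trip through the wire format.
parsed = ota_metadata_pb2.OtaMetadata.FromString(metadata.SerializeToString())
assert parsed.type == ota_metadata_pb2.OtaMetadata.AB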
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 874ab95..2e26a05 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -17,11 +17,25 @@
 import os
 import zipfile
 
+import ota_metadata_pb2
 from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
                     ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
                     SignFile, PARTITIONS_WITH_CARE_MAP, PartitionBuildProps)
 
+
+OPTIONS.no_signing = False
+OPTIONS.force_non_ab = False
+OPTIONS.wipe_user_data = False
+OPTIONS.downgrade = False
+OPTIONS.key_passwords = {}
+OPTIONS.package_key = None
+OPTIONS.incremental_source = None
+OPTIONS.retrofit_dynamic_partitions = False
+OPTIONS.output_metadata_path = None
+OPTIONS.boot_variable_file = None
+
 METADATA_NAME = 'META-INF/com/android/metadata'
+METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'
 UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
 
 
@@ -50,11 +64,12 @@
     # Write the current metadata entry with placeholders.
     with zipfile.ZipFile(input_file) as input_zip:
       for property_files in needed_property_files:
-        metadata[property_files.name] = property_files.Compute(input_zip)
+        metadata.property_files[property_files.name] = property_files.Compute(
+            input_zip)
       namelist = input_zip.namelist()
 
-    if METADATA_NAME in namelist:
-      ZipDelete(input_file, METADATA_NAME)
+    if METADATA_NAME in namelist or METADATA_PROTO_NAME in namelist:
+      ZipDelete(input_file, [METADATA_NAME, METADATA_PROTO_NAME])
     output_zip = zipfile.ZipFile(input_file, 'a')
     WriteMetadata(metadata, output_zip)
     ZipClose(output_zip)
@@ -69,8 +84,9 @@
   def FinalizeAllPropertyFiles(prelim_signing, needed_property_files):
     with zipfile.ZipFile(prelim_signing) as prelim_signing_zip:
       for property_files in needed_property_files:
-        metadata[property_files.name] = property_files.Finalize(
-            prelim_signing_zip, len(metadata[property_files.name]))
+        metadata.property_files[property_files.name] = property_files.Finalize(
+            prelim_signing_zip,
+            len(metadata.property_files[property_files.name]))
 
   # SignOutput(), which in turn calls signapk.jar, will possibly reorder the ZIP
   # entries, as well as padding the entry headers. We do a preliminary signing
@@ -91,7 +107,7 @@
     FinalizeAllPropertyFiles(prelim_signing, needed_property_files)
 
   # Replace the METADATA entry.
-  ZipDelete(prelim_signing, METADATA_NAME)
+  ZipDelete(prelim_signing, [METADATA_NAME, METADATA_PROTO_NAME])
   output_zip = zipfile.ZipFile(prelim_signing, 'a')
   WriteMetadata(metadata, output_zip)
   ZipClose(output_zip)
@@ -105,7 +121,8 @@
   # Reopen the final signed zip to double check the streaming metadata.
   with zipfile.ZipFile(output_file) as output_zip:
     for property_files in needed_property_files:
-      property_files.Verify(output_zip, metadata[property_files.name].strip())
+      property_files.Verify(
+          output_zip, metadata.property_files[property_files.name].strip())
 
   # If requested, dump the metadata to a separate file.
   output_metadata_path = OPTIONS.output_metadata_path
@@ -113,30 +130,93 @@
     WriteMetadata(metadata, output_metadata_path)
 
 
-def WriteMetadata(metadata, output):
+def WriteMetadata(metadata_proto, output):
   """Writes the metadata to the zip archive or a file.
 
   Args:
-    metadata: The metadata dict for the package.
-    output: A ZipFile object or a string of the output file path.
+    metadata_proto: The metadata protobuf for the package.
+    output: A ZipFile object or a string of the output file path. If a string
+      path is given, the metadata in the protobuf format will be written to
+      {output}.pb, e.g. ota_metadata.pb
   """
 
-  value = "".join(["%s=%s\n" % kv for kv in sorted(metadata.items())])
+  metadata_dict = BuildLegacyOtaMetadata(metadata_proto)
+  legacy_metadata = "".join(["%s=%s\n" % kv for kv in
+                             sorted(metadata_dict.items())])
   if isinstance(output, zipfile.ZipFile):
-    ZipWriteStr(output, METADATA_NAME, value,
+    ZipWriteStr(output, METADATA_PROTO_NAME, metadata_proto.SerializeToString(),
+                compress_type=zipfile.ZIP_STORED)
+    ZipWriteStr(output, METADATA_NAME, legacy_metadata,
                 compress_type=zipfile.ZIP_STORED)
     return
 
+  with open('{}.pb'.format(output), 'wb') as f:
+    f.write(metadata_proto.SerializeToString())
   with open(output, 'w') as f:
-    f.write(value)
+    f.write(legacy_metadata)
+
+
+def UpdateDeviceState(device_state, build_info, boot_variable_values,
+                      is_post_build):
+  """Update the fields of the DeviceState proto with build info."""
+
+  def UpdatePartitionStates(partition_states):
+    """Update the per-partition state according to its build.prop"""
+
+    build_info_set = ComputeRuntimeBuildInfos(build_info,
+                                              boot_variable_values)
+    for partition in PARTITIONS_WITH_CARE_MAP:
+      partition_prop = build_info.info_dict.get(
+          '{}.build.prop'.format(partition))
+      # Skip if the partition is missing, or it doesn't have a build.prop
+      if not partition_prop or not partition_prop.build_props:
+        continue
+
+      partition_state = partition_states.add()
+      partition_state.partition_name = partition
+      # Update the partition's runtime device names and fingerprints
+      partition_devices = set()
+      partition_fingerprints = set()
+      for runtime_build_info in build_info_set:
+        partition_devices.add(
+            runtime_build_info.GetPartitionBuildProp('ro.product.device',
+                                                     partition))
+        partition_fingerprints.add(
+            runtime_build_info.GetPartitionFingerprint(partition))
+
+      partition_state.device.extend(sorted(partition_devices))
+      partition_state.build.extend(sorted(partition_fingerprints))
+
+      # TODO(xunchang) set the boot image's version with kmi. Note the boot
+      # image doesn't have a file map.
+      partition_state.version = build_info.GetPartitionBuildProp(
+          'ro.build.date.utc', partition)
+
+  # TODO(xunchang): we can save a call to ComputeRuntimeBuildInfos.
+  build_devices, build_fingerprints = \
+      CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values)
+  device_state.device.extend(sorted(build_devices))
+  device_state.build.extend(sorted(build_fingerprints))
+  device_state.build_incremental = build_info.GetBuildProp(
+      'ro.build.version.incremental')
+
+  UpdatePartitionStates(device_state.partition_state)
+
+  if is_post_build:
+    device_state.sdk_level = build_info.GetBuildProp(
+        'ro.build.version.sdk')
+    device_state.security_patch_level = build_info.GetBuildProp(
+        'ro.build.version.security_patch')
+    # Use the actual post-timestamp, even for a downgrade case.
+    device_state.timestamp = int(build_info.GetBuildProp('ro.build.date.utc'))
 
 
 def GetPackageMetadata(target_info, source_info=None):
-  """Generates and returns the metadata dict.
+  """Generates and returns the metadata proto.
 
-  It generates a dict() that contains the info to be written into an OTA
-  package (META-INF/com/android/metadata). It also handles the detection of
-  downgrade / data wipe based on the global options.
+  It generates a ota_metadata protobuf that contains the info to be written
+  into an OTA package (META-INF/com/android/metadata.pb). It also handles the
+  detection of downgrade / data wipe based on the global options.
 
   Args:
     target_info: The BuildInfo instance that holds the target build info.
@@ -144,66 +224,96 @@
         None if generating full OTA.
 
   Returns:
-    A dict to be written into package metadata entry.
+    A protobuf to be written into package metadata entry.
   """
   assert isinstance(target_info, BuildInfo)
   assert source_info is None or isinstance(source_info, BuildInfo)
 
-  separator = '|'
-
   boot_variable_values = {}
   if OPTIONS.boot_variable_file:
     d = LoadDictionaryFromFile(OPTIONS.boot_variable_file)
     for key, values in d.items():
       boot_variable_values[key] = [val.strip() for val in values.split(',')]
 
-  post_build_devices, post_build_fingerprints = \
-      CalculateRuntimeDevicesAndFingerprints(target_info, boot_variable_values)
-  metadata = {
-      'post-build': separator.join(sorted(post_build_fingerprints)),
-      'post-build-incremental': target_info.GetBuildProp(
-          'ro.build.version.incremental'),
-      'post-sdk-level': target_info.GetBuildProp(
-          'ro.build.version.sdk'),
-      'post-security-patch-level': target_info.GetBuildProp(
-          'ro.build.version.security_patch'),
-  }
+  metadata_proto = ota_metadata_pb2.OtaMetadata()
+  # TODO(xunchang) some fields, e.g. post-device, aren't necessary. We can
+  # consider skipping them if they aren't used by clients.
+  UpdateDeviceState(metadata_proto.postcondition, target_info,
+                    boot_variable_values, True)
 
   if target_info.is_ab and not OPTIONS.force_non_ab:
-    metadata['ota-type'] = 'AB'
-    metadata['ota-required-cache'] = '0'
+    metadata_proto.type = ota_metadata_pb2.OtaMetadata.AB
+    metadata_proto.required_cache = 0
   else:
-    metadata['ota-type'] = 'BLOCK'
+    metadata_proto.type = ota_metadata_pb2.OtaMetadata.BLOCK
+    # The cache requirement will be updated by the non-A/B code.
 
   if OPTIONS.wipe_user_data:
-    metadata['ota-wipe'] = 'yes'
+    metadata_proto.wipe = True
 
   if OPTIONS.retrofit_dynamic_partitions:
-    metadata['ota-retrofit-dynamic-partitions'] = 'yes'
+    metadata_proto.retrofit_dynamic_partitions = True
 
   is_incremental = source_info is not None
   if is_incremental:
-    pre_build_devices, pre_build_fingerprints = \
-        CalculateRuntimeDevicesAndFingerprints(source_info,
-                                               boot_variable_values)
-    metadata['pre-build'] = separator.join(sorted(pre_build_fingerprints))
-    metadata['pre-build-incremental'] = source_info.GetBuildProp(
-        'ro.build.version.incremental')
-    metadata['pre-device'] = separator.join(sorted(pre_build_devices))
+    UpdateDeviceState(metadata_proto.precondition, source_info,
+                      boot_variable_values, False)
   else:
-    metadata['pre-device'] = separator.join(sorted(post_build_devices))
-
-  # Use the actual post-timestamp, even for a downgrade case.
-  metadata['post-timestamp'] = target_info.GetBuildProp('ro.build.date.utc')
+    metadata_proto.precondition.device.extend(
+        metadata_proto.postcondition.device)
 
   # Detect downgrades and set up downgrade flags accordingly.
   if is_incremental:
-    HandleDowngradeMetadata(metadata, target_info, source_info)
+    HandleDowngradeMetadata(metadata_proto, target_info, source_info)
 
-  return metadata
+  return metadata_proto
 
 
-def HandleDowngradeMetadata(metadata, target_info, source_info):
+def BuildLegacyOtaMetadata(metadata_proto):
+  """Converts the metadata proto to a legacy metadata dict.
+
+  This metadata dict is used to build the legacy metadata text file for
+  backward compatibility. We won't add new keys to the legacy metadata format.
+  If new information is needed, we should add it as a new field in OtaMetadata
+  proto definition.
+  """
+
+  separator = '|'
+
+  metadata_dict = {}
+  if metadata_proto.type == ota_metadata_pb2.OtaMetadata.AB:
+    metadata_dict['ota-type'] = 'AB'
+  elif metadata_proto.type == ota_metadata_pb2.OtaMetadata.BLOCK:
+    metadata_dict['ota-type'] = 'BLOCK'
+  if metadata_proto.wipe:
+    metadata_dict['ota-wipe'] = 'yes'
+  if metadata_proto.retrofit_dynamic_partitions:
+    metadata_dict['ota-retrofit-dynamic-partitions'] = 'yes'
+  if metadata_proto.downgrade:
+    metadata_dict['ota-downgrade'] = 'yes'
+
+  metadata_dict['ota-required-cache'] = str(metadata_proto.required_cache)
+
+  post_build = metadata_proto.postcondition
+  metadata_dict['post-build'] = separator.join(post_build.build)
+  metadata_dict['post-build-incremental'] = post_build.build_incremental
+  metadata_dict['post-sdk-level'] = post_build.sdk_level
+  metadata_dict['post-security-patch-level'] = post_build.security_patch_level
+  metadata_dict['post-timestamp'] = str(post_build.timestamp)
+
+  pre_build = metadata_proto.precondition
+  metadata_dict['pre-device'] = separator.join(pre_build.device)
+  # incremental updates
+  if len(pre_build.build) != 0:
+    metadata_dict['pre-build'] = separator.join(pre_build.build)
+    metadata_dict['pre-build-incremental'] = pre_build.build_incremental
+
+  metadata_dict.update(metadata_proto.property_files)
+
+  return metadata_dict
+
+
+def HandleDowngradeMetadata(metadata_proto, target_info, source_info):
   # Only incremental OTAs are allowed to reach here.
   assert OPTIONS.incremental_source is not None
 
@@ -216,7 +326,7 @@
       raise RuntimeError(
           "--downgrade or --override_timestamp specified but no downgrade "
           "detected: pre: %s, post: %s" % (pre_timestamp, post_timestamp))
-    metadata["ota-downgrade"] = "yes"
+    metadata_proto.downgrade = True
   else:
     if is_downgrade:
       raise RuntimeError(
@@ -225,14 +335,12 @@
           "building the incremental." % (pre_timestamp, post_timestamp))
 
 
-def CalculateRuntimeDevicesAndFingerprints(build_info, boot_variable_values):
-  """Returns a tuple of sets for runtime devices and fingerprints"""
+def ComputeRuntimeBuildInfos(default_build_info, boot_variable_values):
+  """Returns a set of build info objects that may exist during runtime."""
 
-  device_names = {build_info.device}
-  fingerprints = {build_info.fingerprint}
-
+  build_info_set = {default_build_info}
   if not boot_variable_values:
-    return device_names, fingerprints
+    return build_info_set
 
   # Calculate all possible combinations of the values for the boot variables.
   keys = boot_variable_values.keys()
@@ -242,7 +350,7 @@
   for placeholder_values in combinations:
     # Reload the info_dict as some build properties may change their values
     # based on the value of ro.boot* properties.
-    info_dict = copy.deepcopy(build_info.info_dict)
+    info_dict = copy.deepcopy(default_build_info.info_dict)
     for partition in PARTITIONS_WITH_CARE_MAP:
       partition_prop_key = "{}.build.prop".format(partition)
       input_file = info_dict[partition_prop_key].input_file
@@ -256,10 +364,22 @@
             PartitionBuildProps.FromInputFile(input_file, partition,
                                               placeholder_values)
     info_dict["build.prop"] = info_dict["system.build.prop"]
+    build_info_set.add(BuildInfo(info_dict, default_build_info.oem_dicts))
 
-    new_build_info = BuildInfo(info_dict, build_info.oem_dicts)
-    device_names.add(new_build_info.device)
-    fingerprints.add(new_build_info.fingerprint)
+  return build_info_set
+
+
+def CalculateRuntimeDevicesAndFingerprints(default_build_info,
+                                           boot_variable_values):
+  """Returns a tuple of sets for runtime devices and fingerprints"""
+
+  device_names = set()
+  fingerprints = set()
+  build_info_set = ComputeRuntimeBuildInfos(default_build_info,
+                                            boot_variable_values)
+  for runtime_build_info in build_info_set:
+    device_names.add(runtime_build_info.device)
+    fingerprints.add(runtime_build_info.fingerprint)
   return device_names, fingerprints
 
 
@@ -403,8 +523,10 @@
     # reserved space serves the metadata entry only.
     if reserve_space:
       tokens.append('metadata:' + ' ' * 15)
+      tokens.append('metadata.pb:' + ' ' * 15)
     else:
       tokens.append(ComputeEntryOffsetSize(METADATA_NAME))
+      tokens.append(ComputeEntryOffsetSize(METADATA_PROTO_NAME))
 
     return ','.join(tokens)
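With this change an OTA package carries both the legacy text entry and the new protobuf entry. A hypothetical reader, not part of this change, that prefers the protobuf and falls back to the legacy key=value format could look roughly like this:

import zipfile

import ota_metadata_pb2

METADATA_NAME = 'META-INF/com/android/metadata'
METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'


def ReadOtaMetadata(package_path):
  """Returns an OtaMetadata proto, or a legacy dict for old packages."""
  with zipfile.ZipFile(package_path) as package_zip:
    if METADATA_PROTO_NAME in package_zip.namelist():
      metadata_proto = ota_metadata_pb2.OtaMetadata()
      metadata_proto.ParseFromString(package_zip.read(METADATA_PROTO_NAME))
      return metadata_proto
    # Fall back to the legacy key=value text entry.
    text = package_zip.read(METADATA_NAME).decode()
    return dict(line.split('=', 1) for line in text.splitlines() if '=' in line)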
 
diff --git a/tools/releasetools/test_apex_utils.py b/tools/releasetools/test_apex_utils.py
index 7b4a4b0..339ddc7 100644
--- a/tools/releasetools/test_apex_utils.py
+++ b/tools/releasetools/test_apex_utils.py
@@ -160,7 +160,7 @@
 
     self.payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
     apex_file = signer.ProcessApexFile(apk_keys, self.payload_key)
-    package_name_extract_cmd = ['aapt', 'dump', 'badging', apex_file]
+    package_name_extract_cmd = ['aapt2', 'dump', 'badging', apex_file]
     output = common.RunAndCheckOutput(package_name_extract_cmd)
     for line in output.splitlines():
       # Sample output from aapt: "package: name='com.google.android.wifi'
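The switch from aapt to aapt2 keeps the badging output format this test relies on. An illustrative parse of the sample line quoted above, assuming the usual "package: name='...'" shape (helper name and regex are illustrative):

import re

def ParsePackageName(badging_output):
  # e.g. "package: name='com.google.android.wifi' versionCode='...' ..."
  match = re.search(r"package: name='([^']+)'", badging_output)
  return match.group(1) if match else None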
diff --git a/tools/releasetools/test_non_ab_ota.py b/tools/releasetools/test_non_ab_ota.py
index ee1b411..5207e2f 100644
--- a/tools/releasetools/test_non_ab_ota.py
+++ b/tools/releasetools/test_non_ab_ota.py
@@ -42,12 +42,13 @@
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
-    self.assertEqual(1, len(tokens))
+    self.assertEqual(2, len(tokens))
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Finalize(self):
     entries = [
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     ]
     zip_file = self.construct_zip_package(entries)
     property_files = NonAbOtaPropertyFiles()
@@ -57,14 +58,16 @@
       property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
     tokens = self._parse_property_files_string(property_files_string)
 
-    self.assertEqual(1, len(tokens))
+    self.assertEqual(2, len(tokens))
     # 'META-INF/com/android/metadata' will be key'd as 'metadata'.
     entries[0] = 'metadata'
+    entries[1] = 'metadata.pb'
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Verify(self):
     entries = (
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     )
     zip_file = self.construct_zip_package(entries)
     property_files = NonAbOtaPropertyFiles()
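The token counts in these assertions grow by one because metadata.pb is now listed alongside metadata. Assuming the property-files string is the usual comma-separated list of "name:offset:size" tokens, a parser sketch for the finalized form would be:

def ParsePropertyFilesString(value):
  """Splits 'name:offset:size,...' into a {name: (offset, size)} dict."""
  result = {}
  for token in value.strip().split(','):
    # In the reserved-space (preliminary) form the size field is padding
    # spaces; this sketch only handles the finalized string.
    name, offset, size = token.split(':')
    result[name] = (int(offset), int(size))
  return result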
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 52aa487..6f5e78f 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -20,17 +20,20 @@
 import zipfile
 
 import common
+import ota_metadata_pb2
 import test_utils
-from ota_utils import CalculateRuntimeDevicesAndFingerprints
+from ota_utils import (
+    BuildLegacyOtaMetadata, CalculateRuntimeDevicesAndFingerprints,
+    FinalizeMetadata, GetPackageMetadata, PropertyFiles)
 from ota_from_target_files import (
-    _LoadOemDicts, AbOtaPropertyFiles, FinalizeMetadata,
-    GetPackageMetadata, GetTargetFilesZipForSecondaryImages,
+    _LoadOemDicts, AbOtaPropertyFiles,
+    GetTargetFilesZipForSecondaryImages,
     GetTargetFilesZipWithoutPostinstallConfig,
-    Payload, PayloadSigner, POSTINSTALL_CONFIG, PropertyFiles,
+    Payload, PayloadSigner, POSTINSTALL_CONFIG,
     StreamingPropertyFiles)
-from non_ab_ota import NonAbOtaPropertyFiles
 from test_utils import PropertyFilesTestCase
 
+
 def construct_target_files(secondary=False):
   """Returns a target-files.zip file for generating OTA packages."""
   target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
@@ -143,14 +146,13 @@
       ),
       'vendor.build.prop': common.PartitionBuildProps.FromDictionary(
           'vendor', {
-               'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'}
+              'ro.vendor.build.fingerprint': 'vendor-build-fingerprint'}
       ),
       'property1': 'value1',
       'property2': 4096,
       'oem_fingerprint_properties': 'ro.product.device ro.product.brand',
   }
 
-
   def setUp(self):
     self.testdata_dir = test_utils.get_testdata_dir()
     self.assertTrue(os.path.exists(self.testdata_dir))
@@ -169,11 +171,16 @@
 
     common.OPTIONS.search_path = test_utils.get_search_path()
 
+  @staticmethod
+  def GetLegacyOtaMetadata(target_info, source_info=None):
+    metadata_proto = GetPackageMetadata(target_info, source_info)
+    return BuildLegacyOtaMetadata(metadata_proto)
+
   def test_GetPackageMetadata_abOta_full(self):
     target_info_dict = copy.deepcopy(self.TEST_TARGET_INFO_DICT)
     target_info_dict['ab_update'] = 'true'
     target_info = common.BuildInfo(target_info_dict, None)
-    metadata = GetPackageMetadata(target_info)
+    metadata = self.GetLegacyOtaMetadata(target_info)
     self.assertDictEqual(
         {
             'ota-type' : 'AB',
@@ -193,7 +200,7 @@
     target_info = common.BuildInfo(target_info_dict, None)
     source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
     common.OPTIONS.incremental_source = ''
-    metadata = GetPackageMetadata(target_info, source_info)
+    metadata = self.GetLegacyOtaMetadata(target_info, source_info)
     self.assertDictEqual(
         {
             'ota-type' : 'AB',
@@ -211,10 +218,11 @@
 
   def test_GetPackageMetadata_nonAbOta_full(self):
     target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
-    metadata = GetPackageMetadata(target_info)
+    metadata = self.GetLegacyOtaMetadata(target_info)
     self.assertDictEqual(
         {
             'ota-type' : 'BLOCK',
+            'ota-required-cache' : '0',
             'post-build' : 'build-fingerprint-target',
             'post-build-incremental' : 'build-version-incremental-target',
             'post-sdk-level' : '27',
@@ -228,10 +236,11 @@
     target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
     source_info = common.BuildInfo(self.TEST_SOURCE_INFO_DICT, None)
     common.OPTIONS.incremental_source = ''
-    metadata = GetPackageMetadata(target_info, source_info)
+    metadata = self.GetLegacyOtaMetadata(target_info, source_info)
     self.assertDictEqual(
         {
             'ota-type' : 'BLOCK',
+            'ota-required-cache' : '0',
             'post-build' : 'build-fingerprint-target',
             'post-build-incremental' : 'build-version-incremental-target',
             'post-sdk-level' : '27',
@@ -246,10 +255,11 @@
   def test_GetPackageMetadata_wipe(self):
     target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
     common.OPTIONS.wipe_user_data = True
-    metadata = GetPackageMetadata(target_info)
+    metadata = self.GetLegacyOtaMetadata(target_info)
     self.assertDictEqual(
         {
             'ota-type' : 'BLOCK',
+            'ota-required-cache' : '0',
             'ota-wipe' : 'yes',
             'post-build' : 'build-fingerprint-target',
             'post-build-incremental' : 'build-version-incremental-target',
@@ -263,11 +273,12 @@
   def test_GetPackageMetadata_retrofitDynamicPartitions(self):
     target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
     common.OPTIONS.retrofit_dynamic_partitions = True
-    metadata = GetPackageMetadata(target_info)
+    metadata = self.GetLegacyOtaMetadata(target_info)
     self.assertDictEqual(
         {
             'ota-retrofit-dynamic-partitions' : 'yes',
             'ota-type' : 'BLOCK',
+            'ota-required-cache' : '0',
             'post-build' : 'build-fingerprint-target',
             'post-build-incremental' : 'build-version-incremental-target',
             'post-sdk-level' : '27',
@@ -293,7 +304,7 @@
     target_info = common.BuildInfo(target_info_dict, None)
     source_info = common.BuildInfo(source_info_dict, None)
     common.OPTIONS.incremental_source = ''
-    self.assertRaises(RuntimeError, GetPackageMetadata, target_info,
+    self.assertRaises(RuntimeError, self.GetLegacyOtaMetadata, target_info,
                       source_info)
 
   def test_GetPackageMetadata_downgrade(self):
@@ -307,11 +318,13 @@
     common.OPTIONS.incremental_source = ''
     common.OPTIONS.downgrade = True
     common.OPTIONS.wipe_user_data = True
-    metadata = GetPackageMetadata(target_info, source_info)
+    metadata = self.GetLegacyOtaMetadata(target_info, source_info)
+
     self.assertDictEqual(
         {
             'ota-downgrade' : 'yes',
             'ota-type' : 'BLOCK',
+            'ota-required-cache' : '0',
             'ota-wipe' : 'yes',
             'post-build' : 'build-fingerprint-target',
             'post-build-incremental' : 'build-version-incremental-target',
@@ -464,13 +477,13 @@
             'A' * 1024 * 1024 * 1024,
             zipfile.ZIP_STORED)
 
-    metadata = {}
+    metadata = ota_metadata_pb2.OtaMetadata()
     output_file = common.MakeTempFile(suffix='.zip')
     needed_property_files = (
         TestPropertyFiles(),
     )
     FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
-    self.assertIn('ota-test-property-files', metadata)
+    self.assertIn('ota-test-property-files', metadata.property_files)
 
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_FinalizeMetadata(self):
@@ -508,13 +521,13 @@
           'A' * 1024 * 1024,
           zipfile.ZIP_STORED)
 
-    metadata = {}
+    metadata = ota_metadata_pb2.OtaMetadata()
     needed_property_files = (
         TestPropertyFiles(),
     )
     output_file = common.MakeTempFile(suffix='.zip')
     FinalizeMetadata(metadata, zip_file, output_file, needed_property_files)
-    self.assertIn('ota-test-property-files', metadata)
+    self.assertIn('ota-test-property-files', metadata.property_files)
 
 
 class TestPropertyFiles(PropertyFiles):
@@ -532,8 +545,8 @@
         'optional-entry2',
     )
 
-class PropertyFilesTest(PropertyFilesTestCase):
 
+class PropertyFilesTest(PropertyFilesTestCase):
 
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_Compute(self):
@@ -547,7 +560,7 @@
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
-    self.assertEqual(3, len(tokens))
+    self.assertEqual(4, len(tokens))
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Compute_withOptionalEntries(self):
@@ -563,7 +576,7 @@
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
-    self.assertEqual(5, len(tokens))
+    self.assertEqual(6, len(tokens))
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Compute_missingRequiredEntry(self):
@@ -581,6 +594,7 @@
         'required-entry1',
         'required-entry2',
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     ]
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
@@ -590,10 +604,11 @@
       streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
     tokens = self._parse_property_files_string(streaming_metadata)
 
-    self.assertEqual(3, len(tokens))
+    self.assertEqual(4, len(tokens))
     # 'META-INF/com/android/metadata' will be key'd as 'metadata' in the
     # streaming metadata.
     entries[2] = 'metadata'
+    entries[3] = 'metadata.pb'
     self._verify_entries(zip_file, tokens, entries)
 
   @test_utils.SkipIfExternalToolsUnavailable()
@@ -604,6 +619,7 @@
         'optional-entry1',
         'optional-entry2',
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     )
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
@@ -638,6 +654,7 @@
         'optional-entry1',
         'optional-entry2',
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     )
     zip_file = self.construct_zip_package(entries)
     property_files = TestPropertyFiles()
@@ -687,7 +704,7 @@
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
-    self.assertEqual(5, len(tokens))
+    self.assertEqual(6, len(tokens))
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Finalize(self):
@@ -697,6 +714,7 @@
         'care_map.txt',
         'compatibility.zip',
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     ]
     zip_file = self.construct_zip_package(entries)
     property_files = StreamingPropertyFiles()
@@ -706,10 +724,11 @@
       streaming_metadata = property_files.Finalize(zip_fp, len(raw_metadata))
     tokens = self._parse_property_files_string(streaming_metadata)
 
-    self.assertEqual(5, len(tokens))
+    self.assertEqual(6, len(tokens))
     # 'META-INF/com/android/metadata' will be key'd as 'metadata' in the
     # streaming metadata.
     entries[4] = 'metadata'
+    entries[5] = 'metadata.pb'
     self._verify_entries(zip_file, tokens, entries)
 
   def test_Verify(self):
@@ -719,6 +738,7 @@
         'care_map.txt',
         'compatibility.zip',
         'META-INF/com/android/metadata',
+        'META-INF/com/android/metadata.pb',
     )
     zip_file = self.construct_zip_package(entries)
     property_files = StreamingPropertyFiles()
@@ -855,6 +875,7 @@
       # Put META-INF/com/android/metadata if needed.
       if with_metadata:
         entries.append('META-INF/com/android/metadata')
+        entries.append('META-INF/com/android/metadata.pb')
 
       for entry in entries:
         zip_fp.writestr(
@@ -870,9 +891,9 @@
       property_files_string = property_files.Compute(zip_fp)
 
     tokens = self._parse_property_files_string(property_files_string)
-    # "6" indcludes the four entries above, one metadata entry, and one entry
+    # "7" indcludes the four entries above, two metadata entries, and one entry
     # for payload-metadata.bin.
-    self.assertEqual(6, len(tokens))
+    self.assertEqual(7, len(tokens))
     self._verify_entries(
         zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
 
@@ -886,9 +907,9 @@
       property_files_string = property_files.Finalize(zip_fp, len(raw_metadata))
 
     tokens = self._parse_property_files_string(property_files_string)
-    # "6" indcludes the four entries above, one metadata entry, and one entry
+    # "7" includes the four entries above, two metadata entries, and one entry
     # for payload-metadata.bin.
-    self.assertEqual(6, len(tokens))
+    self.assertEqual(7, len(tokens))
     self._verify_entries(
         zip_file, tokens, ('care_map.txt', 'compatibility.zip'))
 
@@ -1187,10 +1208,29 @@
       'ro.build.tags=build-tags',
       'ro.build.version.sdk=30',
       'ro.build.version.security_patch=2020',
-      'ro.build.date.utc=12345678'
+      'ro.build.date.utc=12345678',
+      'ro.system.build.version.release=version-release',
+      'ro.system.build.id=build-id',
+      'ro.system.build.version.incremental=version-incremental',
+      'ro.system.build.type=build-type',
+      'ro.system.build.tags=build-tags',
+      'ro.system.build.version.sdk=30',
+      'ro.system.build.version.security_patch=2020',
+      'ro.system.build.date.utc=12345678',
+      'ro.product.system.brand=generic',
+      'ro.product.system.name=generic',
+      'ro.product.system.device=generic',
   ]
 
   VENDOR_BUILD_PROP = [
+      'ro.vendor.build.version.release=version-release',
+      'ro.vendor.build.id=build-id',
+      'ro.vendor.build.version.incremental=version-incremental',
+      'ro.vendor.build.type=build-type',
+      'ro.vendor.build.tags=build-tags',
+      'ro.vendor.build.version.sdk=30',
+      'ro.vendor.build.version.security_patch=2020',
+      'ro.vendor.build.date.utc=12345678',
       'ro.product.vendor.brand=vendor-product-brand',
       'ro.product.vendor.name=vendor-product-name',
       'ro.product.vendor.device=vendor-product-device'
@@ -1326,8 +1366,8 @@
       f.write('ro.boot.sku_name=std,pro')
 
     build_info = common.BuildInfo(common.LoadInfoDict(self.test_dir))
-    metadata = GetPackageMetadata(build_info)
-    self.assertEqual('vendor-product-device', metadata['pre-device'])
+    metadata_dict = BuildLegacyOtaMetadata(GetPackageMetadata(build_info))
+    self.assertEqual('vendor-product-device', metadata_dict['pre-device'])
     fingerprints = [
         self.constructFingerprint(
             'vendor-product-brand/vendor-product-name/vendor-product-device'),
@@ -1336,7 +1376,33 @@
         self.constructFingerprint(
             'vendor-product-brand/vendor-product-std/vendor-product-device'),
     ]
-    self.assertEqual('|'.join(fingerprints), metadata['post-build'])
+    self.assertEqual('|'.join(fingerprints), metadata_dict['post-build'])
+
+  def CheckMetadataEqual(self, metadata_dict, metadata_proto):
+    post_build = metadata_proto.postcondition
+    self.assertEqual('|'.join(post_build.build),
+                     metadata_dict['post-build'])
+    self.assertEqual(post_build.build_incremental,
+                     metadata_dict['post-build-incremental'])
+    self.assertEqual(post_build.sdk_level,
+                     metadata_dict['post-sdk-level'])
+    self.assertEqual(post_build.security_patch_level,
+                     metadata_dict['post-security-patch-level'])
+
+    if metadata_proto.type == ota_metadata_pb2.OtaMetadata.AB:
+      ota_type = 'AB'
+    elif metadata_proto.type == ota_metadata_pb2.OtaMetadata.BLOCK:
+      ota_type = 'BLOCK'
+    else:
+      ota_type = ''
+    self.assertEqual(ota_type, metadata_dict['ota-type'])
+    self.assertEqual(metadata_proto.wipe,
+                     metadata_dict.get('ota-wipe') == 'yes')
+    self.assertEqual(metadata_proto.required_cache,
+                     int(metadata_dict.get('ota-required-cache', 0)))
+    self.assertEqual(metadata_proto.retrofit_dynamic_partitions,
+                     metadata_dict.get(
+                         'ota-retrofit-dynamic-partitions') == 'yes')
 
   def test_GetPackageMetadata_incremental_package(self):
     vendor_build_prop = copy.deepcopy(self.VENDOR_BUILD_PROP)
@@ -1365,7 +1431,18 @@
         'ro.build.tags=build-tags',
         'ro.build.version.sdk=29',
         'ro.build.version.security_patch=2020',
-        'ro.build.date.utc=12340000'
+        'ro.build.date.utc=12340000',
+        'ro.system.build.version.release=source-version-release',
+        'ro.system.build.id=source-build-id',
+        'ro.system.build.version.incremental=source-version-incremental',
+        'ro.system.build.type=build-type',
+        'ro.system.build.tags=build-tags',
+        'ro.system.build.version.sdk=29',
+        'ro.system.build.version.security_patch=2020',
+        'ro.system.build.date.utc=12340000',
+        'ro.product.system.brand=generic',
+        'ro.product.system.name=generic',
+        'ro.product.system.device=generic',
     ]
     self.writeFiles({
         'META/misc_info.txt': '\n'.join(self.MISC_INFO),
@@ -1381,21 +1458,22 @@
     target_info = common.BuildInfo(common.LoadInfoDict(self.test_dir))
     source_info = common.BuildInfo(common.LoadInfoDict(source_dir))
 
-    metadata = GetPackageMetadata(target_info, source_info)
+    metadata_proto = GetPackageMetadata(target_info, source_info)
+    metadata_dict = BuildLegacyOtaMetadata(metadata_proto)
     self.assertEqual(
         'vendor-device-pro|vendor-device-std|vendor-product-device',
-        metadata['pre-device'])
-    suffix = ':source-version-release/source-build-id/' \
-             'source-version-incremental:build-type/build-tags'
+        metadata_dict['pre-device'])
+    source_suffix = ':source-version-release/source-build-id/' \
+                    'source-version-incremental:build-type/build-tags'
     pre_fingerprints = [
         'vendor-product-brand/vendor-product-name/vendor-device-pro'
-        '{}'.format(suffix),
+        '{}'.format(source_suffix),
         'vendor-product-brand/vendor-product-name/vendor-device-std'
-        '{}'.format(suffix),
+        '{}'.format(source_suffix),
         'vendor-product-brand/vendor-product-name/vendor-product-device'
-        '{}'.format(suffix),
+        '{}'.format(source_suffix),
     ]
-    self.assertEqual('|'.join(pre_fingerprints), metadata['pre-build'])
+    self.assertEqual('|'.join(pre_fingerprints), metadata_dict['pre-build'])
 
     post_fingerprints = [
         self.constructFingerprint(
@@ -1405,4 +1483,31 @@
         self.constructFingerprint(
             'vendor-product-brand/vendor-product-name/vendor-product-device'),
     ]
-    self.assertEqual('|'.join(post_fingerprints), metadata['post-build'])
+    self.assertEqual('|'.join(post_fingerprints), metadata_dict['post-build'])
+
+    self.CheckMetadataEqual(metadata_dict, metadata_proto)
+
+    pre_partition_states = metadata_proto.precondition.partition_state
+    self.assertEqual(2, len(pre_partition_states))
+    self.assertEqual('system', pre_partition_states[0].partition_name)
+    self.assertEqual(['generic'], pre_partition_states[0].device)
+    self.assertEqual(['generic/generic/generic{}'.format(source_suffix)],
+                     pre_partition_states[0].build)
+
+    self.assertEqual('vendor', pre_partition_states[1].partition_name)
+    self.assertEqual(['vendor-device-pro', 'vendor-device-std',
+                      'vendor-product-device'], pre_partition_states[1].device)
+    vendor_fingerprints = post_fingerprints
+    self.assertEqual(vendor_fingerprints, pre_partition_states[1].build)
+
+    post_partition_states = metadata_proto.postcondition.partition_state
+    self.assertEqual(2, len(post_partition_states))
+    self.assertEqual('system', post_partition_states[0].partition_name)
+    self.assertEqual(['generic'], post_partition_states[0].device)
+    self.assertEqual([self.constructFingerprint('generic/generic/generic')],
+                     post_partition_states[0].build)
+
+    self.assertEqual('vendor', post_partition_states[1].partition_name)
+    self.assertEqual(['vendor-device-pro', 'vendor-device-std',
+                      'vendor-product-device'], post_partition_states[1].device)
+    self.assertEqual(vendor_fingerprints, post_partition_states[1].build)
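For context, the repeated partition_state field asserted above can be walked like any repeated message field; a small illustrative loop, assuming metadata_proto was returned by GetPackageMetadata:

for state in metadata_proto.postcondition.partition_state:
  # Each PartitionState carries the partition name plus the runtime device
  # names and fingerprints gathered from the boot-variable combinations.
  print(state.partition_name, list(state.device), list(state.build))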
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index 65092d8..7b7f22a 100755
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -22,6 +22,7 @@
 import logging
 import os
 import os.path
+import re
 import struct
 import sys
 import unittest
@@ -224,13 +225,26 @@
         input_fp.seek(offset)
         if entry == 'metadata':
           expected = b'META-INF/COM/ANDROID/METADATA'
+        elif entry == 'metadata.pb':
+          expected = b'META-INF/COM/ANDROID/METADATA-PB'
         else:
           expected = entry.replace('.', '-').upper().encode()
         self.assertEqual(expected, input_fp.read(size))
 
 
 if __name__ == '__main__':
-  testsuite = unittest.TestLoader().discover(
-      os.path.dirname(os.path.realpath(__file__)))
+  # We only want to run tests from the top-level directory. Unfortunately the
+  # pattern option of unittest.discover, internally using fnmatch, doesn't
+  # provide a good API to filter the test files based on directory. So we do an
+  # os.walk and load them manually.
+  test_modules = []
+  base_path = os.path.dirname(os.path.realpath(__file__))
+  for dirpath, _, files in os.walk(base_path):
+    for fn in files:
+      if dirpath == base_path and re.match('test_.*\\.py$', fn):
+        test_modules.append(fn[:-3])
+
+  test_suite = unittest.TestLoader().loadTestsFromNames(test_modules)
+
   # atest needs a verbosity level of >= 2 to correctly parse the result.
-  unittest.TextTestRunner(verbosity=2).run(testsuite)
+  unittest.TextTestRunner(verbosity=2).run(test_suite)