Merge "Make kernel modules into normal installed files"
diff --git a/Changes.md b/Changes.md
index c40d521..461de97 100644
--- a/Changes.md
+++ b/Changes.md
@@ -388,7 +388,7 @@
 
 | instead of                                                   | use                  |
 |--------------------------------------------------------------|----------------------|
-| OUT {#OUT}                                                   | OUT_DIR              |
+| OUT {#OUT}                                                   | PRODUCT_OUT          |
 | ANDROID_HOST_OUT {#ANDROID_HOST_OUT}                         | HOST_OUT             |
 | ANDROID_PRODUCT_OUT {#ANDROID_PRODUCT_OUT}                   | PRODUCT_OUT          |
 | ANDROID_HOST_OUT_TESTCASES {#ANDROID_HOST_OUT_TESTCASES}     | HOST_OUT_TESTCASES   |
diff --git a/core/Makefile b/core/Makefile
index f41550f..92fef49 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -195,6 +195,7 @@
 
 BUILDINFO_SH := build/make/tools/buildinfo.sh
 BUILDINFO_COMMON_SH := build/make/tools/buildinfo_common.sh
+POST_PROCESS_PROPS :=$= build/make/tools/post_process_props.py
 
 # Generates a set of sysprops common to all partitions to a file.
 # $(1): Partition name
@@ -244,7 +245,7 @@
 
 intermediate_system_build_prop := $(call intermediates-dir-for,ETC,system_build_prop)/build.prop
 
-$(INSTALLED_DEFAULT_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(intermediate_system_build_prop)
+$(INSTALLED_DEFAULT_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(POST_PROCESS_PROPS) $(intermediate_system_build_prop)
 	@echo Target buildinfo: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
@@ -253,7 +254,7 @@
 	        echo "#" >> $@;
 	$(hide) $(foreach line,$(FINAL_DEFAULT_PROPERTIES), \
 	    echo "$(line)" >> $@;)
-	$(hide) build/make/tools/post_process_props.py $@
+	$(hide) $(POST_PROCESS_PROPS) $@
 ifdef property_overrides_split_enabled
 	$(hide) mkdir -p $(TARGET_ROOT_OUT)
 	$(hide) ln -sf system/etc/prop.default $(INSTALLED_DEFAULT_PROP_OLD_TARGET)
@@ -266,7 +267,7 @@
 INSTALLED_VENDOR_DEFAULT_PROP_TARGET := $(TARGET_OUT_VENDOR)/default.prop
 ALL_DEFAULT_INSTALLED_MODULES += $(INSTALLED_VENDOR_DEFAULT_PROP_TARGET)
 
-$(INSTALLED_VENDOR_DEFAULT_PROP_TARGET): $(INSTALLED_DEFAULT_PROP_TARGET)
+$(INSTALLED_VENDOR_DEFAULT_PROP_TARGET): $(INSTALLED_DEFAULT_PROP_TARGET) $(POST_PROCESS_PROPS)
 	@echo Target buildinfo: $@
 	@mkdir -p $(dir $@)
 	$(hide) echo "#" > $@; \
@@ -274,7 +275,7 @@
 	        echo "#" >> $@;
 	$(hide) $(foreach line,$(FINAL_VENDOR_DEFAULT_PROPERTIES), \
 	    echo "$(line)" >> $@;)
-	$(hide) build/make/tools/post_process_props.py $@
+	$(hide) $(POST_PROCESS_PROPS) $@
 
 endif  # property_overrides_split_enabled
 
@@ -391,7 +392,7 @@
 else
 system_prop_file := $(wildcard $(TARGET_DEVICE_DIR)/system.prop)
 endif
-$(intermediate_system_build_prop): $(BUILDINFO_SH) $(BUILDINFO_COMMON_SH) $(INTERNAL_BUILD_ID_MAKEFILE) $(BUILD_SYSTEM)/version_defaults.mk $(system_prop_file) $(INSTALLED_ANDROID_INFO_TXT_TARGET) $(API_FINGERPRINT)
+$(intermediate_system_build_prop): $(BUILDINFO_SH) $(BUILDINFO_COMMON_SH) $(INTERNAL_BUILD_ID_MAKEFILE) $(BUILD_SYSTEM)/version_defaults.mk $(system_prop_file) $(INSTALLED_ANDROID_INFO_TXT_TARGET) $(API_FINGERPRINT) $(POST_PROCESS_PROPS)
 	@echo Target buildinfo: $@
 	@mkdir -p $(dir $@)
 	$(hide) echo > $@
@@ -455,7 +456,7 @@
 	            echo "#" >> $@; )
 	$(hide) $(foreach line,$(FINAL_BUILD_PROPERTIES), \
 	    echo "$(line)" >> $@;)
-	$(hide) build/make/tools/post_process_props.py $@ $(PRODUCT_SYSTEM_PROPERTY_BLACKLIST)
+	$(hide) $(POST_PROCESS_PROPS) $@ $(PRODUCT_SYSTEM_PROPERTY_BLACKLIST)
 
 build_desc :=
 
@@ -484,7 +485,7 @@
     $(FINAL_VENDOR_BUILD_PROPERTIES),=)
 endif  # property_overrides_split_enabled
 
-$(INSTALLED_VENDOR_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(intermediate_system_build_prop)
+$(INSTALLED_VENDOR_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(POST_PROCESS_PROPS) $(intermediate_system_build_prop)
 	@echo Target vendor buildinfo: $@
 	@mkdir -p $(dir $@)
 	$(hide) echo > $@
@@ -522,7 +523,7 @@
 	$(hide) $(foreach line,$(FINAL_VENDOR_BUILD_PROPERTIES), \
 	    echo "$(line)" >> $@;)
 endif  # property_overrides_split_enabled
-	$(hide) build/make/tools/post_process_props.py $@ $(PRODUCT_VENDOR_PROPERTY_BLACKLIST)
+	$(hide) $(POST_PROCESS_PROPS) $@ $(PRODUCT_VENDOR_PROPERTY_BLACKLIST)
 
 # -----------------------------------------------------------------
 # product build.prop
@@ -540,7 +541,7 @@
 FINAL_PRODUCT_PROPERTIES := $(call uniq-pairs-by-first-component, \
     $(FINAL_PRODUCT_PROPERTIES),=)
 
-$(INSTALLED_PRODUCT_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(product_prop_files)
+$(INSTALLED_PRODUCT_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(POST_PROCESS_PROPS) $(product_prop_files)
 	@echo Target product buildinfo: $@
 	@mkdir -p $(dir $@)
 	$(hide) echo > $@
@@ -563,7 +564,7 @@
 	        echo "ro.build.characteristics=$(TARGET_AAPT_CHARACTERISTICS)" >> $@;
 	$(hide) $(foreach line,$(FINAL_PRODUCT_PROPERTIES), \
 	    echo "$(line)" >> $@;)
-	$(hide) build/make/tools/post_process_props.py $@
+	$(hide) $(POST_PROCESS_PROPS) $@
 
 # ----------------------------------------------------------------
 # odm build.prop
@@ -575,7 +576,7 @@
 FINAL_ODM_BUILD_PROPERTIES := $(call uniq-pairs-by-first-component, \
     $(FINAL_ODM_BUILD_PROPERTIES),=)
 
-$(INSTALLED_ODM_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH)
+$(INSTALLED_ODM_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(POST_PROCESS_PROPS)
 	@echo Target odm buildinfo: $@
 	@mkdir -p $(dir $@)
 	$(hide) echo > $@
@@ -588,7 +589,7 @@
 	        echo "#" >> $@;
 	$(hide) $(foreach line,$(FINAL_ODM_BUILD_PROPERTIES), \
 	    echo "$(line)" >> $@;)
-	$(hide) build/make/tools/post_process_props.py $@
+	$(hide) $(POST_PROCESS_PROPS) $@
 
 # -----------------------------------------------------------------
 # product_services build.prop (unless it's merged into /product)
@@ -604,7 +605,7 @@
     $(call collapse-pairs, $(PRODUCT_PRODUCT_SERVICES_PROPERTIES))
 FINAL_PRODUCT_SERVICES_PROPERTIES := $(call uniq-pairs-by-first-component, \
     $(FINAL_PRODUCT_SERVICES_PROPERTIES),=)
-$(INSTALLED_PRODUCT_SERVICES_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH)
+$(INSTALLED_PRODUCT_SERVICES_BUILD_PROP_TARGET): $(BUILDINFO_COMMON_SH) $(POST_PROCESS_PROPS)
 	@echo Target product_services buildinfo: $@
 	@mkdir -p $(dir $@)
 	$(hide) echo > $@
@@ -616,7 +617,7 @@
 	        echo "#" >> $@;
 	$(hide) $(foreach line,$(FINAL_PRODUCT_SERVICES_PROPERTIES), \
 	    echo "$(line)" >> $@;)
-	$(hide) build/make/tools/post_process_props.py $@
+	$(hide) $(POST_PROCESS_PROPS) $@
 endif # MERGE_PRODUCT_SERVICES_INTO_PRODUCT
 
 # ----------------------------------------------------------------
@@ -906,12 +907,12 @@
 INSTALLED_FILES_FILE_ROOT := $(PRODUCT_OUT)/installed-files-root.txt
 INSTALLED_FILES_JSON_ROOT := $(INSTALLED_FILES_FILE_ROOT:.txt=.json)
 $(INSTALLED_FILES_FILE_ROOT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ROOT)
-$(INSTALLED_FILES_FILE_ROOT) : $(INTERNAL_ROOT_FILES) $(FILESLIST)
+$(INSTALLED_FILES_FILE_ROOT) : $(INTERNAL_ROOT_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_ROOT_OUT) > $(@:.txt=.json)
-	$(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 $(call dist-for-goals, sdk win_sdk sdk_addon, $(INSTALLED_FILES_FILE_ROOT))
 
@@ -935,13 +936,13 @@
 INSTALLED_FILES_FILE_RAMDISK := $(PRODUCT_OUT)/installed-files-ramdisk.txt
 INSTALLED_FILES_JSON_RAMDISK := $(INSTALLED_FILES_FILE_RAMDISK:.txt=.json)
 $(INSTALLED_FILES_FILE_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_RAMDISK)
-$(INSTALLED_FILES_FILE_RAMDISK) : $(INTERNAL_RAMDISK_FILES) $(FILESLIST)
+$(INSTALLED_FILES_FILE_RAMDISK) : $(INTERNAL_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
 	@mkdir -p $(TARGET_RAMDISK_OUT)
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_RAMDISK_OUT) > $(@:.txt=.json)
-	$(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 $(call dist-for-goals, sdk win_sdk sdk_addon, $(INSTALLED_FILES_FILE_RAMDISK))
 BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img
@@ -1196,6 +1197,8 @@
 license_modules := $(ALL_DEFAULT_INSTALLED_MODULES) $(kernel_notice_file) $(pdk_fusion_notice_files)
 # Phonys/fakes don't have notice files (though their deps might)
 license_modules := $(filter-out $(TARGET_OUT_FAKE)/%,$(license_modules))
+# testcases are not relevant to the system image.
+license_modules := $(filter-out $(TARGET_OUT_TESTCASES)/%,$(license_modules))
 license_modules_vendor := $(filter $(TARGET_OUT_VENDOR)/%,$(license_modules))
 license_modules_product := $(filter $(TARGET_OUT_PRODUCT)/%,$(license_modules))
 license_modules_product_services := $(filter $(TARGET_OUT_PRODUCT_SERVICES)/%,$(license_modules))
@@ -1577,12 +1580,12 @@
 endif
 
 $(INSTALLED_FILES_FILE_RECOVERY): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_RECOVERY)
-$(INSTALLED_FILES_FILE_RECOVERY): $(INTERNAL_RECOVERYIMAGE_FILES) $(FILESLIST)
+$(INSTALLED_FILES_FILE_RECOVERY): $(INTERNAL_RECOVERYIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_RECOVERY_ROOT_OUT) > $(@:.txt=.json)
-	$(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 recovery_initrc := $(call include-path-for, recovery)/etc/init.rc
 recovery_sepolicy := \
@@ -2024,12 +2027,12 @@
 # Because ramdisk-debug.img will rsync from either ramdisk.img or ramdisk-recovery.img.
 # Need to depend on the built ramdisk-debug.img, to get a complete list of the installed files.
 $(INSTALLED_FILES_FILE_DEBUG_RAMDISK) : $(INSTALLED_DEBUG_RAMDISK_TARGET)
-$(INSTALLED_FILES_FILE_DEBUG_RAMDISK) : $(INTERNAL_DEBUG_RAMDISK_FILES) $(FILESLIST)
+$(INSTALLED_FILES_FILE_DEBUG_RAMDISK) : $(INTERNAL_DEBUG_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	echo Installed file list: $@
 	mkdir -p $(dir $@)
 	rm -f $@
 	$(FILESLIST) $(DEBUG_RAMDISK_ROOT_DIR) > $(@:.txt=.json)
-	build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 # ramdisk-debug.img will rsync the content from either ramdisk.img or ramdisk-recovery.img,
 # depending on whether BOARD_USES_RECOVERY_AS_BOOT is set or not.
@@ -2162,12 +2165,12 @@
 INSTALLED_FILES_FILE := $(PRODUCT_OUT)/installed-files.txt
 INSTALLED_FILES_JSON := $(INSTALLED_FILES_FILE:.txt=.json)
 $(INSTALLED_FILES_FILE): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON)
-$(INSTALLED_FILES_FILE): $(FULL_SYSTEMIMAGE_DEPS) $(FILESLIST)
+$(INSTALLED_FILES_FILE): $(FULL_SYSTEMIMAGE_DEPS) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_OUT) > $(@:.txt=.json)
-	$(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 .PHONY: installed-file-list
 installed-file-list: $(INSTALLED_FILES_FILE)
@@ -2642,12 +2645,12 @@
 INSTALLED_FILES_FILE_SYSTEMOTHER := $(PRODUCT_OUT)/installed-files-system-other.txt
 INSTALLED_FILES_JSON_SYSTEMOTHER := $(INSTALLED_FILES_FILE_SYSTEMOTHER:.txt=.json)
 $(INSTALLED_FILES_FILE_SYSTEMOTHER): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_SYSTEMOTHER)
-$(INSTALLED_FILES_FILE_SYSTEMOTHER) : $(INTERNAL_SYSTEMOTHERIMAGE_FILES) $(FILESLIST)
+$(INSTALLED_FILES_FILE_SYSTEMOTHER) : $(INTERNAL_SYSTEMOTHERIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_OUT_SYSTEM_OTHER) > $(@:.txt=.json)
-	$(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 # Determines partition size for system_other.img.
 ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
@@ -2792,12 +2795,12 @@
 INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
 INSTALLED_FILES_JSON_VENDOR := $(INSTALLED_FILES_FILE_VENDOR:.txt=.json)
 $(INSTALLED_FILES_FILE_VENDOR): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR)
-$(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES) $(FILESLIST)
+$(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
-	$(hide) build/make/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 vendorimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,vendor)
@@ -2848,12 +2851,12 @@
 INSTALLED_FILES_FILE_PRODUCT := $(PRODUCT_OUT)/installed-files-product.txt
 INSTALLED_FILES_JSON_PRODUCT := $(INSTALLED_FILES_FILE_PRODUCT:.txt=.json)
 $(INSTALLED_FILES_FILE_PRODUCT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_PRODUCT)
-$(INSTALLED_FILES_FILE_PRODUCT) : $(INTERNAL_PRODUCTIMAGE_FILES) $(FILESLIST)
+$(INSTALLED_FILES_FILE_PRODUCT) : $(INTERNAL_PRODUCTIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_OUT_PRODUCT) > $(@:.txt=.json)
-	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 productimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,product)
@@ -2946,12 +2949,12 @@
 INSTALLED_FILES_FILE_PRODUCT_SERVICES := $(PRODUCT_OUT)/installed-files-product_services.txt
 INSTALLED_FILES_JSON_PRODUCT_SERVICES := $(INSTALLED_FILES_FILE_PRODUCT_SERVICES:.txt=.json)
 $(INSTALLED_FILES_FILE_PRODUCT_SERVICES): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_PRODUCT_SERVICES)
-$(INSTALLED_FILES_FILE_PRODUCT_SERVICES) : $(INTERNAL_PRODUCT_SERVICESIMAGE_FILES) $(FILESLIST)
+$(INSTALLED_FILES_FILE_PRODUCT_SERVICES) : $(INTERNAL_PRODUCT_SERVICESIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_OUT_PRODUCT_SERVICES) > $(@:.txt=.json)
-	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 product_servicesimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,product_services)
@@ -2997,12 +3000,12 @@
 INSTALLED_FILES_FILE_ODM := $(PRODUCT_OUT)/installed-files-odm.txt
 INSTALLED_FILES_JSON_ODM := $(INSTALLED_FILES_FILE_ODM:.txt=.json)
 $(INSTALLED_FILES_FILE_ODM): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ODM)
-$(INSTALLED_FILES_FILE_ODM) : $(INTERNAL_ODMIMAGE_FILES) $(FILESLIST)
+$(INSTALLED_FILES_FILE_ODM) : $(INTERNAL_ODMIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
 	@mkdir -p $(dir $@)
 	@rm -f $@
 	$(hide) $(FILESLIST) $(TARGET_OUT_ODM) > $(@:.txt=.json)
-	$(hide) build/tools/fileslist_util.py -c $(@:.txt=.json) > $@
+	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 odmimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,odm)
@@ -3419,6 +3422,10 @@
 $(if $(filter true,$(AB_OTA_UPDATER)),$(if $(filter true,$(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS)),,_a))
 endef
 
+ifndef BOARD_SUPER_PARTITION_WARN_LIMIT
+BOARD_SUPER_PARTITION_WARN_LIMIT := $$(($(BOARD_SUPER_PARTITION_SIZE) * 95 / 100))
+endif
+
 droid_targets: check-all-partition-sizes
 
 .PHONY: check-all-partition-sizes check-all-partition-sizes-nodeps
@@ -3453,6 +3460,8 @@
 # $(1): human-readable max size string
 # $(2): max size expression
 # $(3): list of partition names
+# $(4): human-readable warn size string
+# $(5): warn size expression
 define check-sum-of-partition-sizes
   partition_size_list="$$(for i in $(call read-size-of-partitions,$(3)); do \
     echo $(call round-partition-size,$${i}); \
@@ -3463,6 +3472,11 @@
     echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '>' "$(2)" '==' $$(( $(2) )); \
     exit 1; \
   else \
+    if [[ ! -z "$(5)" ]] && [ $$(( $${sum_sizes_expr} )) -gt $$(( $(5) )) ]; then \
+        echo "!!!! WARNING !!!! The sum of sizes of [$(strip $(3))] is larger than $(strip $(4)):"; \
+        echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '>' "$(5)" '==' $$(( $(5) )); \
+        echo "Super partition is" $$(( $$(( $$(( $${sum_sizes_expr} )) * 100)) / $$(( $(2) )) )) "percent occupied!"; \
+    fi; \
     echo "The sum of sizes of [$(strip $(3))] is within $(strip $(1)):"; \
     echo $${sum_sizes_expr} '==' $$(( $${sum_sizes_expr} )) '<=' "$(2)" '==' $$(( $(2) )); \
   fi;
@@ -3472,7 +3486,10 @@
   # Check sum(all partitions) <= super partition (/ 2 for A/B devices launched with dynamic partitions)
   $(if $(BOARD_SUPER_PARTITION_SIZE),$(if $(BOARD_SUPER_PARTITION_PARTITION_LIST), \
     $(call check-sum-of-partition-sizes,BOARD_SUPER_PARTITION_SIZE$(if $(call super-slot-suffix), / 2), \
-      $(BOARD_SUPER_PARTITION_SIZE)$(if $(call super-slot-suffix), / 2),$(BOARD_SUPER_PARTITION_PARTITION_LIST))))
+      $(BOARD_SUPER_PARTITION_SIZE)$(if $(call super-slot-suffix), / 2),$(BOARD_SUPER_PARTITION_PARTITION_LIST), \
+      BOARD_SUPER_PARTITION_WARN_LIMIT$(if $(call super-slot-suffix), / 2), \
+      $(BOARD_SUPER_PARTITION_WARN_LIMIT)$(if $(call super-slot-suffix), / 2)) \
+  ))
 
   # For each group, check sum(partitions in group) <= group size
   $(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
@@ -4194,8 +4211,9 @@
 	    $(if $(_group_partition_list), \
 	        echo "$(group)_partition_list=$(_group_partition_list)" >> $(zip_root)/META/dynamic_partitions_info.txt;))
 endif # BOARD_SUPER_PARTITION_GROUPS
+	@# TODO(b/134525174): Remove `-r` after addressing the issue with recovery patch generation.
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
-	    build/make/tools/releasetools/add_img_to_target_files -a -v -p $(HOST_OUT) $(zip_root)
+	    build/make/tools/releasetools/add_img_to_target_files -a -r -v -p $(HOST_OUT) $(zip_root)
 	@# Zip everything up, preserving symlinks and placing META/ files first to
 	@# help early validation of the .zip file while uploading it.
 	$(hide) find $(zip_root)/META | sort >$@.list
@@ -4347,11 +4365,10 @@
 # -----------------------------------------------------------------
 # A zip of the coverage directory.
 #
-name := $(TARGET_PRODUCT)
+name := gcov-report-files-all
 ifeq ($(TARGET_BUILD_TYPE),debug)
 name := $(name)_debug
 endif
-name := $(name)-coverage-$(FILE_NAME_TAG)
 COVERAGE_ZIP := $(PRODUCT_OUT)/$(name).zip
 ifndef TARGET_BUILD_APPS
 $(COVERAGE_ZIP): $(INSTALLED_SYSTEMIMAGE_TARGET) \
@@ -4456,6 +4473,7 @@
 
 # For real devices and for dist builds, build super image from target files to an intermediate directory.
 INTERNAL_SUPERIMAGE_DIST_TARGET := $(call intermediates-dir-for,PACKAGING,super.img)/super.img
+INTERNAL_SUPERIMAGE_MISC_INFO := $(call intermediates-dir-for,PACKAGING,superimage_debug)/misc_info.txt
 $(INTERNAL_SUPERIMAGE_DIST_TARGET): extracted_input_target_files := $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE))
 $(INTERNAL_SUPERIMAGE_DIST_TARGET): $(LPMAKE) $(BUILT_TARGET_FILES_PACKAGE) $(BUILD_SUPER_IMAGE)
 	$(call pretty,"Target super fs image from target files: $@")
@@ -4507,7 +4525,7 @@
 $(INSTALLED_SUPERIMAGE_TARGET): $(INSTALLED_SUPERIMAGE_DEPENDENCIES)
 	$(call pretty,"Target super fs image for debug: $@")
 	$(call build-superimage-target,$(INSTALLED_SUPERIMAGE_TARGET),\
-	  $(call intermediates-dir-for,PACKAGING,superimage_debug)/misc_info.txt)
+	  $(INTERNAL_SUPERIMAGE_MISC_INFO))
 
 droidcore: $(INSTALLED_SUPERIMAGE_TARGET)
 
@@ -4581,6 +4599,7 @@
 	    $(foreach device,$(BOARD_SUPER_PARTITION_BLOCK_DEVICES), \
 	      OTA/super_$(device).img:super_$(device).img)) \
 	  OTA/android-info.txt:android-info.txt "IMAGES/*.img:."
+	$(if $(INTERNAL_SUPERIMAGE_MISC_INFO), zip -q -j -u $@ $(INTERNAL_SUPERIMAGE_MISC_INFO))
 	$(if $(INTERNAL_SUPERIMAGE_DIST_TARGET), zip -q -j -u $@ $(INTERNAL_SUPERIMAGE_DIST_TARGET))
 else
 $(INTERNAL_UPDATE_PACKAGE_TARGET):
diff --git a/core/base_rules.mk b/core/base_rules.mk
index ed5820b..6c7e883 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -887,13 +887,13 @@
 # Use $(LOCAL_MODULE) instead of $(my_register_name) to ignore module's bitness.
 ALL_DEPS.MODULES := $(ALL_DEPS.MODULES) $(LOCAL_MODULE)
 ALL_DEPS.$(LOCAL_MODULE).ALL_DEPS := $(sort \
-  $(ALL_MODULES.$(LOCAL_MODULE).ALL_DEPS) \
+  $(ALL_DEPS.$(LOCAL_MODULE).ALL_DEPS) \
   $(LOCAL_STATIC_LIBRARIES) \
   $(LOCAL_WHOLE_STATIC_LIBRARIES) \
   $(LOCAL_SHARED_LIBRARIES) \
   $(LOCAL_HEADER_LIBRARIES) \
   $(LOCAL_STATIC_JAVA_LIBRARIES) \
-  $(LOCAL_JAVA_LIBRARIES)\
+  $(LOCAL_JAVA_LIBRARIES) \
   $(LOCAL_JNI_SHARED_LIBRARIES))
 
 ALL_DEPS.$(LOCAL_MODULE).LICENSE := $(sort $(ALL_DEPS.$(LOCAL_MODULE).LICENSE) $(license_files))
diff --git a/core/cc_prebuilt_internal.mk b/core/cc_prebuilt_internal.mk
index b936bd7..2bf4fdc 100644
--- a/core/cc_prebuilt_internal.mk
+++ b/core/cc_prebuilt_internal.mk
@@ -18,6 +18,11 @@
 # Internal build rules for native prebuilt modules
 ############################################################
 
+prebuilt_module_classes := STATIC_LIBRARIES SHARED_LIBRARIES EXECUTABLES NATIVE_TESTS
+ifeq ($(filter $(prebuilt_module_classes),$(LOCAL_MODULE_CLASS)),)
+$(call pretty-error,cc_prebuilt_internal.mk is for $(prebuilt_module_classes) modules only)
+endif
+
 my_strip_module := $(firstword \
   $(LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
   $(LOCAL_STRIP_MODULE))
diff --git a/core/combo/javac.mk b/core/combo/javac.mk
index dac2628..32a5c9e 100644
--- a/core/combo/javac.mk
+++ b/core/combo/javac.mk
@@ -16,4 +16,5 @@
 
 # TODO(ccross): remove this, it is needed for now because it is used by
 # config.mk before makevars from soong are loaded
-JAVA := $(ANDROID_JAVA_TOOLCHAIN)/java
+JAVA := $(ANDROID_JAVA_TOOLCHAIN)/java -XX:OnError="cat hs_err_pid%p.log" -XX:CICompilerCount=6 -XX:+UseDynamicNumberOfGCThreads
+
diff --git a/core/config.mk b/core/config.mk
index 4a26a76..a3365d2 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -522,6 +522,7 @@
 CKATI := $(prebuilt_build_tools_bin)/ckati
 DEPMOD := $(HOST_OUT_EXECUTABLES)/depmod
 FILESLIST := $(SOONG_HOST_OUT_EXECUTABLES)/fileslist
+FILESLIST_UTIL :=$= build/make/tools/fileslist_util.py
 HOST_INIT_VERIFIER := $(HOST_OUT_EXECUTABLES)/host_init_verifier
 MAKEPARALLEL := $(prebuilt_build_tools_bin)/makeparallel
 SOONG_JAVAC_WRAPPER := $(SOONG_HOST_OUT_EXECUTABLES)/soong_javac_wrapper
@@ -808,6 +809,7 @@
     26.0 \
     27.0 \
     28.0 \
+    29.0 \
 
 .KATI_READONLY := \
     PLATFORM_SEPOLICY_COMPAT_VERSIONS \
diff --git a/core/definitions.mk b/core/definitions.mk
index 5d3227a..b18cc72 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -1082,7 +1082,7 @@
         $(PRIVATE_PROTOC_FLAGS) \
         $$f || exit 33; \
         done
-$(hide) touch $@
+$(SOONG_ZIP) -o $@ -C $(PRIVATE_PROTO_JAVA_OUTPUT_DIR) -D $(PRIVATE_PROTO_JAVA_OUTPUT_DIR)
 endef
 
 ######################################################################
@@ -1838,6 +1838,16 @@
 $(transform-host-o-to-executable-inner)
 endef
 
+###########################################################
+## Commands for packaging native coverage files
+###########################################################
+define package-coverage-files
+  @rm -f $@ $@.lst $@.premerged
+  @touch $@.lst
+  $(foreach obj,$(strip $(PRIVATE_ALL_OBJECTS)), $(hide) echo $(obj) >> $@.lst$(newline))
+  $(hide) $(SOONG_ZIP) -o $@.premerged -C $(OUT_DIR) -l $@.lst
+  $(hide) $(MERGE_ZIPS) -ignore-duplicates $@ $@.premerged $(strip $(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES))
+endef
 
 ###########################################################
 ## Commands for running javac to make .class files
@@ -2013,8 +2023,6 @@
 $(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
     find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' -and -not -name '.*' >> $(1); \
 fi
-$(if $(PRIVATE_HAS_PROTO_SOURCES), \
-    $(hide) find $(PRIVATE_PROTO_SOURCE_INTERMEDIATES_DIR) -name '*.java' -and -not -name '.*' >> $(1))
 endef
 
 # Some historical notes:
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index c28c144..558e49b 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -84,7 +84,7 @@
 endif
 
 ifeq ($(my_native_coverage),true)
-gcno_suffix := .gcnodir
+gcno_suffix := .zip
 
 built_whole_gcno_libraries := \
     $(foreach lib,$(my_whole_static_libraries), \
@@ -106,11 +106,11 @@
 
 GCNO_ARCHIVE := $(my_installed_module_stem)$(gcno_suffix)
 
+$(intermediates)/$(GCNO_ARCHIVE) : $(SOONG_ZIP) $(MERGE_ZIPS)
 $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_OBJECTS := $(strip $(LOCAL_GCNO_FILES))
 $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_WHOLE_STATIC_LIBRARIES := $(strip $(built_whole_gcno_libraries)) $(strip $(built_static_gcno_libraries))
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_INTERMEDIATES_DIR := $(intermediates)
 $(intermediates)/$(GCNO_ARCHIVE) : $(LOCAL_GCNO_FILES) $(built_whole_gcno_libraries) $(built_static_gcno_libraries)
-	$(transform-o-to-static-lib)
+	$(package-coverage-files)
 
 $(my_coverage_path)/$(GCNO_ARCHIVE) : $(intermediates)/$(GCNO_ARCHIVE)
 	$(copy-file-to-target)
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 423575c..8e655ff 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -73,7 +73,6 @@
 java_sources_deps := \
     $(java_sources) \
     $(java_resource_sources) \
-    $(proto_java_sources_file_stamp) \
     $(LOCAL_SRCJARS) \
     $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index a29a1b8..6c23789 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -64,7 +64,6 @@
 java_sources_deps := \
     $(java_sources) \
     $(java_resource_sources) \
-    $(proto_java_sources_file_stamp) \
     $(LOCAL_SRCJARS) \
     $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
diff --git a/core/java.mk b/core/java.mk
index 41a1686..449da11 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -233,11 +233,10 @@
 java_sources_deps := \
     $(java_sources) \
     $(java_resource_sources) \
-    $(proto_java_sources_file_stamp) \
     $(LOCAL_SRCJARS) \
     $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
-$(java_source_list_file): $(java_sources_deps)
+$(java_source_list_file): $(java_sources_deps) $(NORMALIZE_PATH)
 	$(write-java-source-list)
 
 ALL_MODULES.$(my_register_name).SRCJARS := $(LOCAL_SRCJARS)
diff --git a/core/java_common.mk b/core/java_common.mk
index db5b6c3..9909885 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -67,37 +67,36 @@
   LOCAL_PROTOC_OPTIMIZE_TYPE := lite
 endif
 proto_sources := $(filter %.proto,$(LOCAL_SRC_FILES))
-# Because names of the .java files compiled from .proto files are unknown until the
-# .proto files are compiled, we use a timestamp file as depedency.
-proto_java_sources_file_stamp :=
 ifneq ($(proto_sources),)
 proto_sources_fullpath := $(addprefix $(LOCAL_PATH)/, $(proto_sources))
 
 proto_java_intemediate_dir := $(intermediates.COMMON)/proto
-proto_java_sources_file_stamp := $(proto_java_intemediate_dir)/Proto.stamp
 proto_java_sources_dir := $(proto_java_intemediate_dir)/src
+proto_java_srcjar := $(intermediates.COMMON)/proto.srcjar
 
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_INCLUDES := $(TOP)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_SRC_FILES := $(proto_sources_fullpath)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_DIR := $(proto_java_sources_dir)
-$(proto_java_sources_file_stamp): PRIVATE_PROTOC_FLAGS := $(LOCAL_PROTOC_FLAGS)
+LOCAL_SRCJARS += $(proto_java_srcjar)
+
+$(proto_java_srcjar): PRIVATE_PROTO_INCLUDES := $(TOP)
+$(proto_java_srcjar): PRIVATE_PROTO_SRC_FILES := $(proto_sources_fullpath)
+$(proto_java_srcjar): PRIVATE_PROTO_JAVA_OUTPUT_DIR := $(proto_java_sources_dir)
+$(proto_java_srcjar): PRIVATE_PROTOC_FLAGS := $(LOCAL_PROTOC_FLAGS)
 ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),micro)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javamicro_out
+$(proto_java_srcjar): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javamicro_out
 else
   ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),nano)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javanano_out
+$(proto_java_srcjar): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javanano_out
   else
     ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),stream)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javastream_out
-$(proto_java_sources_file_stamp): PRIVATE_PROTOC_FLAGS += --plugin=$(HOST_OUT_EXECUTABLES)/protoc-gen-javastream
-$(proto_java_sources_file_stamp): $(HOST_OUT_EXECUTABLES)/protoc-gen-javastream
+$(proto_java_srcjar): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javastream_out
+$(proto_java_srcjar): PRIVATE_PROTOC_FLAGS += --plugin=$(HOST_OUT_EXECUTABLES)/protoc-gen-javastream
+$(proto_java_srcjar): $(HOST_OUT_EXECUTABLES)/protoc-gen-javastream
     else
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --java_out
+$(proto_java_srcjar): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --java_out
     endif
   endif
 endif
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_PARAMS := $(if $(filter lite,$(LOCAL_PROTOC_OPTIMIZE_TYPE)),lite$(if $(LOCAL_PROTO_JAVA_OUTPUT_PARAMS),:,),)$(LOCAL_PROTO_JAVA_OUTPUT_PARAMS)
-$(proto_java_sources_file_stamp) : $(proto_sources_fullpath) $(PROTOC)
+$(proto_java_srcjar): PRIVATE_PROTO_JAVA_OUTPUT_PARAMS := $(if $(filter lite,$(LOCAL_PROTOC_OPTIMIZE_TYPE)),lite$(if $(LOCAL_PROTO_JAVA_OUTPUT_PARAMS),:,),)$(LOCAL_PROTO_JAVA_OUTPUT_PARAMS)
+$(proto_java_srcjar) : $(proto_sources_fullpath) $(PROTOC) $(SOONG_ZIP)
 	$(call transform-proto-to-java)
 
 #TODO: protoc should output the dependencies introduced by imports.
@@ -231,8 +230,6 @@
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates.COMMON)/classes
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ANNO_INTERMEDIATES_DIR := $(intermediates.COMMON)/anno
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/src
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HAS_PROTO_SOURCES := $(if $(proto_sources),true)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PROTO_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/proto
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HAS_RS_SOURCES :=
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAVA_SOURCES := $(all_java_sources)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAVA_SOURCE_LIST := $(java_source_list_file)
diff --git a/core/misc_prebuilt_internal.mk b/core/misc_prebuilt_internal.mk
new file mode 100644
index 0000000..cdd5cd5
--- /dev/null
+++ b/core/misc_prebuilt_internal.mk
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2019 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+############################################################
+# Internal build rules for misc prebuilt modules that don't need additional processing
+############################################################
+
+prebuilt_module_classes := SCRIPT ETC DATA
+ifeq ($(filter $(prebuilt_module_classes),$(LOCAL_MODULE_CLASS)),)
+$(call pretty-error,misc_prebuilt_internal.mk is for $(prebuilt_module_classes) modules only)
+endif
+
+include $(BUILD_SYSTEM)/base_rules.mk
+
+$(LOCAL_BUILT_MODULE) : $(my_prebuilt_src_file)
+	$(transform-prebuilt-to-target)
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index f5b92fe..ad62e1a 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -49,9 +49,12 @@
   include $(BUILD_SYSTEM)/app_prebuilt_internal.mk
 else ifeq (JAVA_LIBRARIES,$(LOCAL_MODULE_CLASS))
   include $(BUILD_SYSTEM)/java_prebuilt_internal.mk
-else
-  # TODO(jungjw): Check LOCAL_MODULE_CLASS value and generate an error for unexpected ones.
+else ifneq ($(filter STATIC_LIBRARIES SHARED_LIBRARIES EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
   include $(BUILD_SYSTEM)/cc_prebuilt_internal.mk
+else ifneq ($(filter SCRIPT ETC DATA,$(LOCAL_MODULE_CLASS)),)
+  include $(BUILD_SYSTEM)/misc_prebuilt_internal.mk
+else
+  $(error $(LOCAL_MODULE) : unexpected LOCAL_MODULE_CLASS for prebuilts: $(LOCAL_MODULE_CLASS))
 endif
 
 $(built_module) : $(LOCAL_ADDITIONAL_DEPENDENCIES)
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index 44bb020..858884a 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -77,7 +77,7 @@
 	$(transform-o-to-shared-lib)
 
 ifeq ($(my_native_coverage),true)
-gcno_suffix := .gcnodir
+gcno_suffix := .zip
 
 built_whole_gcno_libraries := \
     $(foreach lib,$(my_whole_static_libraries), \
@@ -99,11 +99,11 @@
 
 GCNO_ARCHIVE := $(basename $(my_installed_module_stem))$(gcno_suffix)
 
+$(intermediates)/$(GCNO_ARCHIVE) : $(SOONG_ZIP) $(MERGE_ZIPS)
 $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_OBJECTS := $(strip $(LOCAL_GCNO_FILES))
 $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_WHOLE_STATIC_LIBRARIES := $(strip $(built_whole_gcno_libraries)) $(strip $(built_static_gcno_libraries))
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_INTERMEDIATES_DIR := $(intermediates)
 $(intermediates)/$(GCNO_ARCHIVE) : $(LOCAL_GCNO_FILES) $(built_whole_gcno_libraries) $(built_static_gcno_libraries)
-	$(transform-o-to-static-lib)
+	$(package-coverage-files)
 
 $(my_coverage_path)/$(GCNO_ARCHIVE) : $(intermediates)/$(GCNO_ARCHIVE)
 	$(copy-file-to-target)
diff --git a/core/soong_cc_prebuilt.mk b/core/soong_cc_prebuilt.mk
index 55dd077..301f985 100644
--- a/core/soong_cc_prebuilt.mk
+++ b/core/soong_cc_prebuilt.mk
@@ -187,14 +187,14 @@
 
 ifeq ($(NATIVE_COVERAGE),true)
   ifneq (,$(strip $(LOCAL_PREBUILT_COVERAGE_ARCHIVE)))
-    $(eval $(call copy-one-file,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(intermediates)/$(LOCAL_MODULE).gcnodir))
+    $(eval $(call copy-one-file,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(intermediates)/$(LOCAL_MODULE).zip))
     ifneq ($(LOCAL_UNINSTALLABLE_MODULE),true)
       ifdef LOCAL_IS_HOST_MODULE
         my_coverage_path := $($(my_prefix)OUT_COVERAGE)/$(patsubst $($(my_prefix)OUT)/%,%,$(my_module_path))
       else
         my_coverage_path := $(TARGET_OUT_COVERAGE)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_module_path))
       endif
-      my_coverage_path := $(my_coverage_path)/$(patsubst %.so,%,$(my_installed_module_stem)).gcnodir
+      my_coverage_path := $(my_coverage_path)/$(patsubst %.so,%,$(my_installed_module_stem)).zip
       $(eval $(call copy-one-file,$(LOCAL_PREBUILT_COVERAGE_ARCHIVE),$(my_coverage_path)))
       $(LOCAL_BUILT_MODULE): $(my_coverage_path)
     endif
@@ -202,13 +202,12 @@
     # Coverage information is needed when static lib is a dependency of another
     # coverage-enabled module.
     ifeq (STATIC_LIBRARIES, $(LOCAL_MODULE_CLASS))
-      GCNO_ARCHIVE := $(LOCAL_MODULE).gcnodir
+      GCNO_ARCHIVE := $(LOCAL_MODULE).zip
+      $(intermediates)/$(GCNO_ARCHIVE) : $(SOONG_ZIP) $(MERGE_ZIPS)
       $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_OBJECTS :=
       $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_WHOLE_STATIC_LIBRARIES :=
-      $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_PREFIX := $(my_prefix)
-      $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_2ND_ARCH_VAR_PREFIX := $(LOCAL_2ND_ARCH_VAR_PREFIX)
       $(intermediates)/$(GCNO_ARCHIVE) :
-	$(transform-o-to-static-lib)
+	$(package-coverage-files)
     endif
   endif
 endif
diff --git a/core/static_library_internal.mk b/core/static_library_internal.mk
index 6b4d22f..f82e501 100644
--- a/core/static_library_internal.mk
+++ b/core/static_library_internal.mk
@@ -25,7 +25,7 @@
 	$(transform-o-to-static-lib)
 
 ifeq ($(NATIVE_COVERAGE),true)
-gcno_suffix := .gcnodir
+gcno_suffix := .zip
 
 built_whole_gcno_libraries := \
     $(foreach lib,$(my_whole_static_libraries), \
@@ -35,11 +35,9 @@
 
 GCNO_ARCHIVE := $(LOCAL_MODULE)$(gcno_suffix)
 
+$(intermediates)/$(GCNO_ARCHIVE) : $(SOONG_ZIP) $(MERGE_ZIPS)
 $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_OBJECTS := $(strip $(LOCAL_GCNO_FILES))
 $(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_ALL_WHOLE_STATIC_LIBRARIES := $(strip $(built_whole_gcno_libraries))
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_PREFIX := $(my_prefix)
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_2ND_ARCH_VAR_PREFIX := $(LOCAL_2ND_ARCH_VAR_PREFIX)
-$(intermediates)/$(GCNO_ARCHIVE) : PRIVATE_INTERMEDIATES_DIR := $(intermediates)
 $(intermediates)/$(GCNO_ARCHIVE) : $(LOCAL_GCNO_FILES) $(built_whole_gcno_libraries)
-	$(transform-o-to-static-lib)
+	$(package-coverage-files)
 endif
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 62c91f8..0a798d5 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -252,7 +252,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-      PLATFORM_SECURITY_PATCH := 2019-05-05
+      PLATFORM_SECURITY_PATCH := 2019-06-05
 endif
 .KATI_READONLY := PLATFORM_SECURITY_PATCH
 
diff --git a/target/board/Android.mk b/target/board/Android.mk
index 971a7b2..c8705c3 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -19,7 +19,7 @@
 ifndef board_info_txt
 board_info_txt := $(wildcard $(TARGET_DEVICE_DIR)/board-info.txt)
 endif
-$(INSTALLED_ANDROID_INFO_TXT_TARGET): $(board_info_txt)
+$(INSTALLED_ANDROID_INFO_TXT_TARGET): $(board_info_txt) build/make/tools/check_radio_versions.py
 	$(hide) build/make/tools/check_radio_versions.py $< $(BOARD_INFO_CHECK)
 	$(call pretty,"Generated: ($@)")
 ifdef board_info_txt
diff --git a/target/board/BoardConfigMainlineCommon.mk b/target/board/BoardConfigMainlineCommon.mk
index be7c804..be014bf 100644
--- a/target/board/BoardConfigMainlineCommon.mk
+++ b/target/board/BoardConfigMainlineCommon.mk
@@ -41,3 +41,6 @@
 
 # Generate an APEX image for experiment b/119800099.
 DEXPREOPT_GENERATE_APEX_IMAGE := true
+
+# Mainline devices support apex
+TARGET_FLATTEN_APEX := false
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 606a605..7bec975 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -281,6 +281,7 @@
     fastboot \
     flags_health_check \
     icu-data_host_runtime_apex \
+    icu_tzdata.dat_host_tzdata_apex \
     incident_report \
     ld.mc \
     lpdump \
@@ -299,9 +300,12 @@
     viewcompiler \
     tzdata_host \
     tzdata_host_runtime_apex \
+    tzdata_host_tzdata_apex \
     tzlookup.xml_host_runtime_apex \
+    tzlookup.xml_host_tzdata_apex \
     tz_version_host \
     tz_version_host_runtime_apex \
+    tz_version_host_tzdata_apex \
 
 ifeq ($(TARGET_CORE_JARS),)
 $(error TARGET_CORE_JARS is empty; cannot initialize PRODUCT_BOOT_JARS variable)
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index c45f870..bb39cd9 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -49,6 +49,7 @@
     gralloc.default \
     group \
     init_vendor \
+    libashmemd_hidl_client \
     libbundlewrapper \
     libclearkeycasplugin \
     libdownmix \
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 4156c8b..44719ed 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -28,7 +28,8 @@
 
   -r  (--rebuild_recovery)
       Rebuild the recovery patch and write it to the system image. Only
-      meaningful when system image needs to be rebuilt.
+      meaningful when system image needs to be rebuilt and there are separate
+      boot / recovery images.
 
   --replace_verity_private_key
       Replace the private key used for verity signing. (same as the option
@@ -164,7 +165,8 @@
       else:
         common.ZipWrite(output_zip, ofile.name, arc_name)
 
-  if OPTIONS.rebuild_recovery:
+  if (OPTIONS.rebuild_recovery and recovery_img is not None and
+      boot_img is not None):
     logger.info("Building new recovery patch")
     common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
                              boot_img, info_dict=OPTIONS.info_dict)
@@ -448,7 +450,7 @@
     if partition not in needed_partitions:
       continue
     assert (partition in common.AVB_PARTITIONS or
-            partition.startswith('vbmeta_')), \
+            partition in common.AVB_VBMETA_PARTITIONS), \
         'Unknown partition: {}'.format(partition)
     assert os.path.exists(path), \
         'Failed to find {} for {}'.format(path, partition)
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 9d67c49..80f8002 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -99,6 +99,9 @@
 AVB_PARTITIONS = ('boot', 'dtbo', 'odm', 'product', 'product_services',
                   'recovery', 'system', 'vendor')
 
+# Chained VBMeta partitions.
+AVB_VBMETA_PARTITIONS = ('vbmeta_system', 'vbmeta_vendor')
+
 # Partitions that should have their care_map added to META/care_map.pb
 PARTITIONS_WITH_CARE_MAP = ('system', 'vendor', 'product', 'product_services',
                             'odm')
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index f900b12..f37c0ee 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -15,31 +15,31 @@
 # the License.
 """This script merges two partial target files packages.
 
-One package contains system files, and the other contains non-system files.
+One package contains framework files, and the other contains vendor files.
 It produces a complete target files package that can be used to generate an
 OTA package.
 
 Usage: merge_target_files.py [args]
 
-  --system-target-files system-target-files-zip-archive
-      The input target files package containing system bits. This is a zip
+  --framework-target-files framework-target-files-zip-archive
+      The input target files package containing framework bits. This is a zip
       archive.
 
-  --system-item-list system-item-list-file
+  --framework-item-list framework-item-list-file
       The optional path to a newline-separated config file that replaces the
-      contents of DEFAULT_SYSTEM_ITEM_LIST if provided.
+      contents of DEFAULT_FRAMEWORK_ITEM_LIST if provided.
 
-  --system-misc-info-keys system-misc-info-keys-file
+  --framework-misc-info-keys framework-misc-info-keys-file
       The optional path to a newline-separated config file that replaces the
-      contents of DEFAULT_SYSTEM_MISC_INFO_KEYS if provided.
+      contents of DEFAULT_FRAMEWORK_MISC_INFO_KEYS if provided.
 
-  --other-target-files other-target-files-zip-archive
-      The input target files package containing other bits. This is a zip
+  --vendor-target-files vendor-target-files-zip-archive
+      The input target files package containing vendor bits. This is a zip
       archive.
 
-  --other-item-list other-item-list-file
+  --vendor-item-list vendor-item-list-file
       The optional path to a newline-separated config file that replaces the
-      contents of DEFAULT_OTHER_ITEM_LIST if provided.
+      contents of DEFAULT_VENDOR_ITEM_LIST if provided.
 
   --output-target-files output-target-files-package
       If provided, the output merged target files package. Also a zip archive.
@@ -93,11 +93,11 @@
 logger = logging.getLogger(__name__)
 OPTIONS = common.OPTIONS
 OPTIONS.verbose = True
-OPTIONS.system_target_files = None
-OPTIONS.system_item_list = None
-OPTIONS.system_misc_info_keys = None
-OPTIONS.other_target_files = None
-OPTIONS.other_item_list = None
+OPTIONS.framework_target_files = None
+OPTIONS.framework_item_list = None
+OPTIONS.framework_misc_info_keys = None
+OPTIONS.vendor_target_files = None
+OPTIONS.vendor_item_list = None
 OPTIONS.output_target_files = None
 OPTIONS.output_dir = None
 OPTIONS.output_item_list = None
@@ -107,12 +107,12 @@
 OPTIONS.rebuild_recovery = False
 OPTIONS.keep_tmp = False
 
-# DEFAULT_SYSTEM_ITEM_LIST is a list of items to extract from the partial
-# system target files package as is, meaning these items will land in the
+# DEFAULT_FRAMEWORK_ITEM_LIST is a list of items to extract from the partial
+# framework target files package as is, meaning these items will land in the
 # output target files package exactly as they appear in the input partial
-# system target files package.
+# framework target files package.
 
-DEFAULT_SYSTEM_ITEM_LIST = (
+DEFAULT_FRAMEWORK_ITEM_LIST = (
     'META/apkcerts.txt',
     'META/filesystem_config.txt',
     'META/root_filesystem_config.txt',
@@ -124,17 +124,18 @@
     'SYSTEM/*',
 )
 
-# SYSTEM_EXTRACT_SPECIAL_ITEM_LIST is a list of items to extract from the
-# partial system target files package that need some special processing, such
-# as some sort of combination with items from the partial other target files
+# FRAMEWORK_EXTRACT_SPECIAL_ITEM_LIST is a list of items to extract from the
+# partial framework target files package that need some special processing, such
+# as some sort of combination with items from the partial vendor target files
 # package.
 
-SYSTEM_EXTRACT_SPECIAL_ITEM_LIST = ('META/*',)
+FRAMEWORK_EXTRACT_SPECIAL_ITEM_LIST = ('META/*',)
 
-# DEFAULT_SYSTEM_MISC_INFO_KEYS is a list of keys to obtain from the system
-# instance of META/misc_info.txt. The remaining keys from the other instance.
+# DEFAULT_FRAMEWORK_MISC_INFO_KEYS is a list of keys to obtain from the
+# framework instance of META/misc_info.txt. The remaining keys come from
+# the vendor instance.
 
-DEFAULT_SYSTEM_MISC_INFO_KEYS = (
+DEFAULT_FRAMEWORK_MISC_INFO_KEYS = (
     'avb_system_hashtree_enable',
     'avb_system_add_hashtree_footer_args',
     'avb_system_key_path',
@@ -151,12 +152,12 @@
     'system_size',
 )
 
-# DEFAULT_OTHER_ITEM_LIST is a list of items to extract from the partial
-# other target files package as is, meaning these items will land in the output
-# target files package exactly as they appear in the input partial other target
+# DEFAULT_VENDOR_ITEM_LIST is a list of items to extract from the partial
+# vendor target files package as is, meaning these items will land in the output
+# target files package exactly as they appear in the input partial vendor target
 # files package.
 
-DEFAULT_OTHER_ITEM_LIST = (
+DEFAULT_VENDOR_ITEM_LIST = (
     'META/boot_filesystem_config.txt',
     'META/otakeys.txt',
     'META/releasetools.py',
@@ -172,17 +173,17 @@
     'VENDOR/*',
 )
 
-# OTHER_EXTRACT_SPECIAL_ITEM_LIST is a list of items to extract from the
-# partial other target files package that need some special processing, such as
-# some sort of combination with items from the partial system target files
+# VENDOR_EXTRACT_SPECIAL_ITEM_LIST is a list of items to extract from the
+# partial vendor target files package that need some special processing, such as
+# some sort of combination with items from the partial framework target files
 # package.
 
-OTHER_EXTRACT_SPECIAL_ITEM_LIST = ('META/*',)
+VENDOR_EXTRACT_SPECIAL_ITEM_LIST = ('META/*',)
 
 # The merge config lists should not attempt to extract items from both
 # builds for any of the following partitions. The partitions in
 # SINGLE_BUILD_PARTITIONS should come entirely from a single build (either
-# system or other, but not both).
+# framework or vendor, but not both).
 
 SINGLE_BUILD_PARTITIONS = (
     'BOOT/',
@@ -211,9 +212,7 @@
       be overridden if it exists.
   """
   with open(path, 'w') as output:
-    sorted_data = sorted(data.keys()) if isinstance(data,
-                                                    dict) else sorted(data)
-    for entry in sorted_data:
+    for entry in sorted(data):
       out_str = '{}={}\n'.format(entry, data[entry]) if isinstance(
           data, dict) else '{}\n'.format(entry)
       output.write(out_str)
@@ -295,28 +294,29 @@
     return config_file.read().splitlines()
 
 
-def validate_config_lists(system_item_list, system_misc_info_keys,
-                          other_item_list):
+def validate_config_lists(framework_item_list, framework_misc_info_keys,
+                          vendor_item_list):
   """Performs validations on the merge config lists.
 
   Args:
-    system_item_list: The list of items to extract from the partial system
+    framework_item_list: The list of items to extract from the partial framework
       target files package as is.
-    system_misc_info_keys: A list of keys to obtain from the system instance of
-      META/misc_info.txt. The remaining keys from the other instance.
-    other_item_list: The list of items to extract from the partial other target
-      files package as is.
+    framework_misc_info_keys: A list of keys to obtain from the framework
+      instance of META/misc_info.txt. The remaining keys come from the
+      vendor instance.
+    vendor_item_list: The list of items to extract from the partial vendor
+      target files package as is.
 
   Returns:
     False if a validation fails, otherwise true.
   """
   has_error = False
 
-  default_combined_item_set = set(DEFAULT_SYSTEM_ITEM_LIST)
-  default_combined_item_set.update(DEFAULT_OTHER_ITEM_LIST)
+  default_combined_item_set = set(DEFAULT_FRAMEWORK_ITEM_LIST)
+  default_combined_item_set.update(DEFAULT_VENDOR_ITEM_LIST)
 
-  combined_item_set = set(system_item_list)
-  combined_item_set.update(other_item_list)
+  combined_item_set = set(framework_item_list)
+  combined_item_set.update(vendor_item_list)
 
   # Check that the merge config lists are not missing any item specified
   # by the default config lists.
@@ -324,36 +324,37 @@
   if difference:
     logger.error('Missing merge config items: %s', list(difference))
     logger.error('Please ensure missing items are in either the '
-                 'system-item-list or other-item-list files provided to '
+                 'framework-item-list or vendor-item-list files provided to '
                  'this script.')
     has_error = True
 
   for partition in SINGLE_BUILD_PARTITIONS:
-    in_system = any(item.startswith(partition) for item in system_item_list)
-    in_other = any(item.startswith(partition) for item in other_item_list)
-    if in_system and in_other:
+    in_framework = any(
+        item.startswith(partition) for item in framework_item_list)
+    in_vendor = any(item.startswith(partition) for item in vendor_item_list)
+    if in_framework and in_vendor:
       logger.error(
-          'Cannot extract items from {0} for both the system and other builds. '
+          'Cannot extract items from {0} for both the framework and vendor builds. '
           'Please ensure only one merge config item list includes {0}.'.format(
               partition))
       has_error = True
 
-  if ('dynamic_partition_list' in system_misc_info_keys) or (
-      'super_partition_groups' in system_misc_info_keys):
+  if ('dynamic_partition_list' in framework_misc_info_keys) or (
+      'super_partition_groups' in framework_misc_info_keys):
     logger.error('Dynamic partition misc info keys should come from '
-                 'the other instance of META/misc_info.txt.')
+                 'the vendor instance of META/misc_info.txt.')
     has_error = True
 
   return not has_error
 
 
-def process_ab_partitions_txt(system_target_files_temp_dir,
-                              other_target_files_temp_dir,
+def process_ab_partitions_txt(framework_target_files_temp_dir,
+                              vendor_target_files_temp_dir,
                               output_target_files_temp_dir):
   """Perform special processing for META/ab_partitions.txt.
 
   This function merges the contents of the META/ab_partitions.txt files from
-  the system directory and the other directory, placing the merged result in
+  the framework directory and the vendor directory, placing the merged result in
   the output directory. The precondition in that the files are already
   extracted. The post condition is that the output META/ab_partitions.txt
   contains the merged content. The format for each ab_partitions.txt a one
@@ -361,28 +362,28 @@
   names.
 
   Args:
-    system_target_files_temp_dir: The name of a directory containing the special
-      items extracted from the system target files package.
-    other_target_files_temp_dir: The name of a directory containing the special
-      items extracted from the other target files package.
+    framework_target_files_temp_dir: The name of a directory containing the
+      special items extracted from the framework target files package.
+    vendor_target_files_temp_dir: The name of a directory containing the special
+      items extracted from the vendor target files package.
     output_target_files_temp_dir: The name of a directory that will be used to
       create the output target files package after all the special cases are
       processed.
   """
 
-  system_ab_partitions_txt = os.path.join(system_target_files_temp_dir, 'META',
+  framework_ab_partitions_txt = os.path.join(framework_target_files_temp_dir,
+                                             'META', 'ab_partitions.txt')
+
+  vendor_ab_partitions_txt = os.path.join(vendor_target_files_temp_dir, 'META',
                                           'ab_partitions.txt')
 
-  other_ab_partitions_txt = os.path.join(other_target_files_temp_dir, 'META',
-                                         'ab_partitions.txt')
+  with open(framework_ab_partitions_txt) as f:
+    framework_ab_partitions = f.read().splitlines()
 
-  with open(system_ab_partitions_txt) as f:
-    system_ab_partitions = f.read().splitlines()
+  with open(vendor_ab_partitions_txt) as f:
+    vendor_ab_partitions = f.read().splitlines()
 
-  with open(other_ab_partitions_txt) as f:
-    other_ab_partitions = f.read().splitlines()
-
-  output_ab_partitions = set(system_ab_partitions + other_ab_partitions)
+  output_ab_partitions = set(framework_ab_partitions + vendor_ab_partitions)
 
   output_ab_partitions_txt = os.path.join(output_target_files_temp_dir, 'META',
                                           'ab_partitions.txt')
@@ -416,8 +417,8 @@
             'selabel=u:object_r:install_recovery_exec:s0 capabilities=0x0\n')
 
 
-def merge_dynamic_partition_info_dicts(system_dict,
-                                       other_dict,
+def merge_dynamic_partition_info_dicts(framework_dict,
+                                       vendor_dict,
                                        include_dynamic_partition_list=True,
                                        size_prefix='',
                                        size_suffix='',
@@ -426,10 +427,10 @@
   """Merges dynamic partition info variables.
 
   Args:
-    system_dict: The dictionary of dynamic partition info variables from the
-      partial system target files.
-    other_dict: The dictionary of dynamic partition info variables from the
-      partial other target files.
+    framework_dict: The dictionary of dynamic partition info variables from the
+      partial framework target files.
+    vendor_dict: The dictionary of dynamic partition info variables from the
+      partial vendor target files.
     include_dynamic_partition_list: If true, merges the dynamic_partition_list
       variable. Not all use cases need this variable merged.
     size_prefix: The prefix in partition group size variables that precedes the
@@ -447,53 +448,56 @@
     The merged dynamic partition info dictionary.
   """
   merged_dict = {}
-  # Partition groups and group sizes are defined by the other (non-system)
-  # dict because these values may vary for each board that uses a shared system
-  # image.
-  merged_dict['super_partition_groups'] = other_dict['super_partition_groups']
+  # Partition groups and group sizes are defined by the vendor dict because
+  # these values may vary for each board that uses a shared system image.
+  merged_dict['super_partition_groups'] = vendor_dict['super_partition_groups']
   if include_dynamic_partition_list:
-    system_dynamic_partition_list = system_dict.get('dynamic_partition_list',
+    framework_dynamic_partition_list = framework_dict.get(
+        'dynamic_partition_list', '')
+    vendor_dynamic_partition_list = vendor_dict.get('dynamic_partition_list',
                                                     '')
-    other_dynamic_partition_list = other_dict.get('dynamic_partition_list', '')
     merged_dict['dynamic_partition_list'] = (
-        '%s %s' %
-        (system_dynamic_partition_list, other_dynamic_partition_list)).strip()
+        '%s %s' % (framework_dynamic_partition_list,
+                   vendor_dynamic_partition_list)).strip()
   for partition_group in merged_dict['super_partition_groups'].split(' '):
-    # Set the partition group's size using the value from the other dict.
+    # Set the partition group's size using the value from the vendor dict.
     key = '%s%s%s' % (size_prefix, partition_group, size_suffix)
-    if key not in other_dict:
-      raise ValueError('Other dict does not contain required key %s.' % key)
-    merged_dict[key] = other_dict[key]
+    if key not in vendor_dict:
+      raise ValueError('Vendor dict does not contain required key %s.' % key)
+    merged_dict[key] = vendor_dict[key]
 
     # Set the partition group's partition list using a concatenation of the
-    # system and other partition lists.
+    # framework and vendor partition lists.
     key = '%s%s%s' % (list_prefix, partition_group, list_suffix)
     merged_dict[key] = (
-        '%s %s' % (system_dict.get(key, ''), other_dict.get(key, ''))).strip()
+        '%s %s' %
+        (framework_dict.get(key, ''), vendor_dict.get(key, ''))).strip()
   return merged_dict
 
 
-def process_misc_info_txt(system_target_files_temp_dir,
-                          other_target_files_temp_dir,
-                          output_target_files_temp_dir, system_misc_info_keys):
+def process_misc_info_txt(framework_target_files_temp_dir,
+                          vendor_target_files_temp_dir,
+                          output_target_files_temp_dir,
+                          framework_misc_info_keys):
   """Perform special processing for META/misc_info.txt.
 
   This function merges the contents of the META/misc_info.txt files from the
-  system directory and the other directory, placing the merged result in the
+  framework directory and the vendor directory, placing the merged result in the
   output directory. The precondition is that the files are already extracted.
   The post condition is that the output META/misc_info.txt contains the merged
   content.
 
   Args:
-    system_target_files_temp_dir: The name of a directory containing the special
-      items extracted from the system target files package.
-    other_target_files_temp_dir: The name of a directory containing the special
-      items extracted from the other target files package.
+    framework_target_files_temp_dir: The name of a directory containing the
+      special items extracted from the framework target files package.
+    vendor_target_files_temp_dir: The name of a directory containing the special
+      items extracted from the vendor target files package.
     output_target_files_temp_dir: The name of a directory that will be used to
       create the output target files package after all the special cases are
       processed.
-    system_misc_info_keys: A list of keys to obtain from the system instance of
-      META/misc_info.txt. The remaining keys from the other instance.
+    framework_misc_info_keys: A list of keys to obtain from the framework
+      instance of META/misc_info.txt. The remaining keys come from the vendor
+      instance.
   """
 
   def read_helper(d):
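
For reference, an illustrative input/output pair for the renamed merge_dynamic_partition_info_dicts above; the values are invented and only show that groups and group sizes come from the vendor dict while each group's partition list concatenates framework then vendor:

framework_dict = {
    'super_partition_groups': 'group_a',
    'dynamic_partition_list': 'system',
    'super_group_a_partition_list': 'system',
}
vendor_dict = {
    'super_partition_groups': 'group_a group_b',
    'dynamic_partition_list': 'vendor product',
    'super_group_a_partition_list': 'vendor',
    'super_group_a_group_size': '1000',
    'super_group_b_partition_list': 'product',
    'super_group_b_group_size': '2000',
}
# With size_prefix='super_', size_suffix='_group_size', list_prefix='super_',
# and list_suffix='_partition_list' (the misc_info.txt call below), the merge
# would yield:
expected_merged_dict = {
    'super_partition_groups': 'group_a group_b',
    'dynamic_partition_list': 'system vendor product',
    'super_group_a_partition_list': 'system vendor',
    'super_group_a_group_size': '1000',
    'super_group_b_partition_list': 'product',
    'super_group_b_group_size': '2000',
}
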
@@ -501,72 +505,72 @@
     with open(misc_info_txt) as f:
       return list(f.read().splitlines())
 
-  system_info_dict = common.LoadDictionaryFromLines(
-      read_helper(system_target_files_temp_dir))
+  framework_dict = common.LoadDictionaryFromLines(
+      read_helper(framework_target_files_temp_dir))
 
-  # We take most of the misc info from the other target files.
+  # We take most of the misc info from the vendor target files.
 
-  merged_info_dict = common.LoadDictionaryFromLines(
-      read_helper(other_target_files_temp_dir))
+  merged_dict = common.LoadDictionaryFromLines(
+      read_helper(vendor_target_files_temp_dir))
 
-  # Replace certain values in merged_info_dict with values from
-  # system_info_dict.
+  # Replace certain values in merged_dict with values from
+  # framework_dict.
 
-  for key in system_misc_info_keys:
-    merged_info_dict[key] = system_info_dict[key]
+  for key in framework_misc_info_keys:
+    merged_dict[key] = framework_dict[key]
 
   # Merge misc info keys used for Dynamic Partitions.
-  if (merged_info_dict.get('use_dynamic_partitions') == 'true') and (
-      system_info_dict.get('use_dynamic_partitions') == 'true'):
+  if (merged_dict.get('use_dynamic_partitions') == 'true') and (
+      framework_dict.get('use_dynamic_partitions') == 'true'):
     merged_dynamic_partitions_dict = merge_dynamic_partition_info_dicts(
-        system_dict=system_info_dict,
-        other_dict=merged_info_dict,
+        framework_dict=framework_dict,
+        vendor_dict=merged_dict,
         size_prefix='super_',
         size_suffix='_group_size',
         list_prefix='super_',
         list_suffix='_partition_list')
-    merged_info_dict.update(merged_dynamic_partitions_dict)
+    merged_dict.update(merged_dynamic_partitions_dict)
 
-  # Replace <image>_selinux_fc values with system or other file_contexts.bin
+  # Replace <image>_selinux_fc values with framework or vendor file_contexts.bin
   # depending on which dictionary the key came from.
   # Only the file basename is required because all selinux_fc properties are
   # replaced with the full path to the file under META/ when misc_info.txt is
   # loaded from target files for repacking. See common.py LoadInfoDict().
-  for key in merged_info_dict:
+  for key in merged_dict:
     if key.endswith('_selinux_fc'):
-      merged_info_dict[key] = 'other_file_contexts.bin'
-  for key in system_info_dict:
+      merged_dict[key] = 'vendor_file_contexts.bin'
+  for key in framework_dict:
     if key.endswith('_selinux_fc'):
-      merged_info_dict[key] = 'system_file_contexts.bin'
+      merged_dict[key] = 'framework_file_contexts.bin'
 
   output_misc_info_txt = os.path.join(output_target_files_temp_dir, 'META',
                                       'misc_info.txt')
-  write_sorted_data(data=merged_info_dict, path=output_misc_info_txt)
+  write_sorted_data(data=merged_dict, path=output_misc_info_txt)
 
 
-def process_dynamic_partitions_info_txt(system_target_files_dir,
-                                        other_target_files_dir,
+def process_dynamic_partitions_info_txt(framework_target_files_dir,
+                                        vendor_target_files_dir,
                                         output_target_files_dir):
   """Perform special processing for META/dynamic_partitions_info.txt.
 
   This function merges the contents of the META/dynamic_partitions_info.txt
-  files from the system directory and the other directory, placing the merged
-  result in the output directory.
+  files from the framework directory and the vendor directory, placing the
+  merged result in the output directory.
 
-  This function does nothing if META/dynamic_partitions_info.txt from the other
+  This function does nothing if META/dynamic_partitions_info.txt from the vendor
   directory does not exist.
 
   Args:
-    system_target_files_dir: The name of a directory containing the special
-      items extracted from the system target files package.
-    other_target_files_dir: The name of a directory containing the special items
-      extracted from the other target files package.
+    framework_target_files_dir: The name of a directory containing the special
+      items extracted from the framework target files package.
+    vendor_target_files_dir: The name of a directory containing the special
+      items extracted from the vendor target files package.
     output_target_files_dir: The name of a directory that will be used to create
       the output target files package after all the special cases are processed.
   """
 
   if not os.path.exists(
-      os.path.join(other_target_files_dir, 'META',
+      os.path.join(vendor_target_files_dir, 'META',
                    'dynamic_partitions_info.txt')):
     return
 
@@ -576,14 +580,14 @@
     with open(dynamic_partitions_info_txt) as f:
       return list(f.read().splitlines())
 
-  system_dynamic_partitions_dict = common.LoadDictionaryFromLines(
-      read_helper(system_target_files_dir))
-  other_dynamic_partitions_dict = common.LoadDictionaryFromLines(
-      read_helper(other_target_files_dir))
+  framework_dynamic_partitions_dict = common.LoadDictionaryFromLines(
+      read_helper(framework_target_files_dir))
+  vendor_dynamic_partitions_dict = common.LoadDictionaryFromLines(
+      read_helper(vendor_target_files_dir))
 
   merged_dynamic_partitions_dict = merge_dynamic_partition_info_dicts(
-      system_dict=system_dynamic_partitions_dict,
-      other_dict=other_dynamic_partitions_dict,
+      framework_dict=framework_dynamic_partitions_dict,
+      vendor_dict=vendor_dynamic_partitions_dict,
       # META/dynamic_partitions_info.txt does not use dynamic_partition_list.
       include_dynamic_partition_list=False,
       size_suffix='_size',
@@ -596,24 +600,23 @@
       path=output_dynamic_partitions_info_txt)
 
 
-def process_apex_keys_apk_certs_common(system_target_files_dir,
-                                       other_target_files_dir,
+def process_apex_keys_apk_certs_common(framework_target_files_dir,
+                                       vendor_target_files_dir,
                                        output_target_files_dir, file_name):
   """Perform special processing for META/apexkeys.txt or META/apkcerts.txt.
 
   This function merges the contents of the META/apexkeys.txt or
-  META/apkcerts.txt
-  files from the system directory and the other directory, placing the merged
-  result in the output directory. The precondition in that the files are already
-  extracted.
-  The post condition is that the output META/apexkeys.txt or META/apkcerts.txt
-  contains the merged content.
+  META/apkcerts.txt files from the framework directory and the vendor
+  directory, placing the merged result in the output directory. The
+  precondition is that the files are already extracted. The post condition
+  is that the output META/apexkeys.txt or META/apkcerts.txt contains the
+  merged content.
 
   Args:
-    system_target_files_dir: The name of a directory containing the special
-      items extracted from the system target files package.
-    other_target_files_dir: The name of a directory containing the special items
-      extracted from the other target files package.
+    framework_target_files_dir: The name of a directory containing the special
+      items extracted from the framework target files package.
+    vendor_target_files_dir: The name of a directory containing the special
+      items extracted from the vendor target files package.
     output_target_files_dir: The name of a directory that will be used to create
       the output target files package after all the special cases are processed.
     file_name: The name of the file to merge. One of apkcerts.txt or
@@ -629,73 +632,75 @@
           temp[line.split()[0]] = line.strip()
     return temp
 
-  system_dict = read_helper(system_target_files_dir)
-  other_dict = read_helper(other_target_files_dir)
+  framework_dict = read_helper(framework_target_files_dir)
+  vendor_dict = read_helper(vendor_target_files_dir)
 
-  for key in system_dict:
-    if key in other_dict and other_dict[key] != system_dict[key]:
+  for key in framework_dict:
+    if key in vendor_dict and vendor_dict[key] != framework_dict[key]:
       raise ValueError('Conflicting entries found in %s:\n %s and\n %s' %
-                       (file_name, system_dict[key], other_dict[key]))
-    other_dict[key] = system_dict[key]
+                       (file_name, framework_dict[key], vendor_dict[key]))
+    vendor_dict[key] = framework_dict[key]
 
   output_file = os.path.join(output_target_files_dir, 'META', file_name)
 
-  write_sorted_data(data=other_dict.values(), path=output_file)
+  write_sorted_data(data=vendor_dict.values(), path=output_file)
 
 
-def copy_file_contexts(system_target_files_dir, other_target_files_dir,
+def copy_file_contexts(framework_target_files_dir, vendor_target_files_dir,
                        output_target_files_dir):
   """Creates named copies of each build's file_contexts.bin in output META/."""
-  system_fc_path = os.path.join(system_target_files_dir, 'META',
-                                'system_file_contexts.bin')
-  if not os.path.exists(system_fc_path):
-    system_fc_path = os.path.join(system_target_files_dir, 'META',
+  framework_fc_path = os.path.join(framework_target_files_dir, 'META',
+                                   'framework_file_contexts.bin')
+  if not os.path.exists(framework_fc_path):
+    framework_fc_path = os.path.join(framework_target_files_dir, 'META',
+                                     'file_contexts.bin')
+    if not os.path.exists(framework_fc_path):
+      raise ValueError('Missing framework file_contexts.bin.')
+  shutil.copyfile(
+      framework_fc_path,
+      os.path.join(output_target_files_dir, 'META',
+                   'framework_file_contexts.bin'))
+
+  vendor_fc_path = os.path.join(vendor_target_files_dir, 'META',
+                                'vendor_file_contexts.bin')
+  if not os.path.exists(vendor_fc_path):
+    vendor_fc_path = os.path.join(vendor_target_files_dir, 'META',
                                   'file_contexts.bin')
-    if not os.path.exists(system_fc_path):
-      raise ValueError('Missing system file_contexts.bin.')
+    if not os.path.exists(vendor_fc_path):
+      raise ValueError('Missing vendor file_contexts.bin.')
   shutil.copyfile(
-      system_fc_path,
-      os.path.join(output_target_files_dir, 'META', 'system_file_contexts.bin'))
-
-  other_fc_path = os.path.join(other_target_files_dir, 'META',
-                               'other_file_contexts.bin')
-  if not os.path.exists(other_fc_path):
-    other_fc_path = os.path.join(other_target_files_dir, 'META',
-                                 'file_contexts.bin')
-    if not os.path.exists(other_fc_path):
-      raise ValueError('Missing other file_contexts.bin.')
-  shutil.copyfile(
-      other_fc_path,
-      os.path.join(output_target_files_dir, 'META', 'other_file_contexts.bin'))
+      vendor_fc_path,
+      os.path.join(output_target_files_dir, 'META', 'vendor_file_contexts.bin'))
 
 
-def process_special_cases(system_target_files_temp_dir,
-                          other_target_files_temp_dir,
-                          output_target_files_temp_dir, system_misc_info_keys,
-                          rebuild_recovery):
+def process_special_cases(framework_target_files_temp_dir,
+                          vendor_target_files_temp_dir,
+                          output_target_files_temp_dir,
+                          framework_misc_info_keys, rebuild_recovery):
   """Perform special-case processing for certain target files items.
 
   Certain files in the output target files package require special-case
   processing. This function performs all that special-case processing.
 
   Args:
-    system_target_files_temp_dir: The name of a directory containing the special
-      items extracted from the system target files package.
-    other_target_files_temp_dir: The name of a directory containing the special
-      items extracted from the other target files package.
+    framework_target_files_temp_dir: The name of a directory containing the
+      special items extracted from the framework target files package.
+    vendor_target_files_temp_dir: The name of a directory containing the special
+      items extracted from the vendor target files package.
     output_target_files_temp_dir: The name of a directory that will be used to
       create the output target files package after all the special cases are
       processed.
-    system_misc_info_keys: A list of keys to obtain from the system instance of
-      META/misc_info.txt. The remaining keys from the other instance.
+    framework_misc_info_keys: A list of keys to obtain from the framework
+      instance of META/misc_info.txt. The remaining keys come from the vendor
+      instance.
     rebuild_recovery: If true, rebuild the recovery patch used by non-A/B
       devices and write it to the system image.
   """
 
-  if 'ab_update' in system_misc_info_keys:
+  if 'ab_update' in framework_misc_info_keys:
     process_ab_partitions_txt(
-        system_target_files_temp_dir=system_target_files_temp_dir,
-        other_target_files_temp_dir=other_target_files_temp_dir,
+        framework_target_files_temp_dir=framework_target_files_temp_dir,
+        vendor_target_files_temp_dir=vendor_target_files_temp_dir,
         output_target_files_temp_dir=output_target_files_temp_dir)
 
   if rebuild_recovery:
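
The apkcerts.txt/apexkeys.txt merge above keys each entry by its first token; a minimal sketch of that rule (the function name is made up), including the conflict check from the hunk:

def sketch_merge_keyed_lines(framework_lines, vendor_lines, file_name):

  def to_dict(lines):
    # Key each non-empty line by its first whitespace-separated token.
    return {line.split()[0]: line.strip() for line in lines if line.strip()}

  framework = to_dict(framework_lines)
  merged = to_dict(vendor_lines)
  for key, value in framework.items():
    if key in merged and merged[key] != value:
      raise ValueError('Conflicting entries found in %s:\n %s and\n %s' %
                       (file_name, value, merged[key]))
    merged[key] = value
  return sorted(merged.values())
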
@@ -703,65 +708,66 @@
         output_target_files_temp_dir=output_target_files_temp_dir)
 
   copy_file_contexts(
-      system_target_files_dir=system_target_files_temp_dir,
-      other_target_files_dir=other_target_files_temp_dir,
+      framework_target_files_dir=framework_target_files_temp_dir,
+      vendor_target_files_dir=vendor_target_files_temp_dir,
       output_target_files_dir=output_target_files_temp_dir)
 
   process_misc_info_txt(
-      system_target_files_temp_dir=system_target_files_temp_dir,
-      other_target_files_temp_dir=other_target_files_temp_dir,
+      framework_target_files_temp_dir=framework_target_files_temp_dir,
+      vendor_target_files_temp_dir=vendor_target_files_temp_dir,
       output_target_files_temp_dir=output_target_files_temp_dir,
-      system_misc_info_keys=system_misc_info_keys)
+      framework_misc_info_keys=framework_misc_info_keys)
 
   process_dynamic_partitions_info_txt(
-      system_target_files_dir=system_target_files_temp_dir,
-      other_target_files_dir=other_target_files_temp_dir,
+      framework_target_files_dir=framework_target_files_temp_dir,
+      vendor_target_files_dir=vendor_target_files_temp_dir,
       output_target_files_dir=output_target_files_temp_dir)
 
   process_apex_keys_apk_certs_common(
-      system_target_files_dir=system_target_files_temp_dir,
-      other_target_files_dir=other_target_files_temp_dir,
+      framework_target_files_dir=framework_target_files_temp_dir,
+      vendor_target_files_dir=vendor_target_files_temp_dir,
       output_target_files_dir=output_target_files_temp_dir,
       file_name='apkcerts.txt')
 
   process_apex_keys_apk_certs_common(
-      system_target_files_dir=system_target_files_temp_dir,
-      other_target_files_dir=other_target_files_temp_dir,
+      framework_target_files_dir=framework_target_files_temp_dir,
+      vendor_target_files_dir=vendor_target_files_temp_dir,
       output_target_files_dir=output_target_files_temp_dir,
       file_name='apexkeys.txt')
 
 
-def merge_target_files(temp_dir, system_target_files, system_item_list,
-                       system_misc_info_keys, other_target_files,
-                       other_item_list, output_target_files, output_dir,
+def merge_target_files(temp_dir, framework_target_files, framework_item_list,
+                       framework_misc_info_keys, vendor_target_files,
+                       vendor_item_list, output_target_files, output_dir,
                        output_item_list, output_ota, output_img,
                        output_super_empty, rebuild_recovery):
   """Merge two target files packages together.
 
-  This function takes system and other target files packages as input, performs
-  various file extractions, special case processing, and finally creates a
-  merged zip archive as output.
+  This function takes framework and vendor target files packages as input,
+  performs various file extractions, special case processing, and finally
+  creates a merged zip archive as output.
 
   Args:
     temp_dir: The name of a directory we use when we extract items from the
       input target files packages, and also a scratch directory that we use for
       temporary files.
-    system_target_files: The name of the zip archive containing the system
+    framework_target_files: The name of the zip archive containing the framework
       partial target files package.
-    system_item_list: The list of items to extract from the partial system
+    framework_item_list: The list of items to extract from the partial framework
       target files package as is, meaning these items will land in the output
-      target files package exactly as they appear in the input partial system
+      target files package exactly as they appear in the input partial framework
       target files package.
-    system_misc_info_keys: The list of keys to obtain from the system instance
-      of META/misc_info.txt. The remaining keys from the other instance.
-    other_target_files: The name of the zip archive containing the other partial
+    framework_misc_info_keys: The list of keys to obtain from the framework
+      instance of META/misc_info.txt. The remaining keys come from the vendor
+      instance.
+    vendor_target_files: The name of the zip archive containing the vendor
+      partial target files package.
+    vendor_item_list: The list of items to extract from the partial vendor
+      target files package as is, meaning these items will land in the output
+      target files package exactly as they appear in the input partial vendor
       target files package.
-    other_item_list: The list of items to extract from the partial other target
-      files package as is, meaning these items will land in the output target
-      files package exactly as they appear in the input partial other target
-      files package.
     output_target_files: The name of the output zip archive target files package
-      created by merging system and other.
+      created by merging framework and vendor.
     output_dir: The destination directory for saving merged files.
     output_item_list: The list of items to copy into the output_dir.
     output_ota: The name of the output zip archive ota package.
@@ -772,51 +778,51 @@
       devices and write it to the system image.
   """
 
-  logger.info('starting: merge system %s and other %s into output %s',
-              system_target_files, other_target_files, output_target_files)
+  logger.info('starting: merge framework %s and vendor %s into output %s',
+              framework_target_files, vendor_target_files, output_target_files)
 
-  # Create directory names that we'll use when we extract files from system,
-  # and other, and for zipping the final output.
+  # Create directory names that we'll use when we extract files from framework,
+  # and vendor, and for zipping the final output.
 
-  system_target_files_temp_dir = os.path.join(temp_dir, 'system')
-  other_target_files_temp_dir = os.path.join(temp_dir, 'other')
+  framework_target_files_temp_dir = os.path.join(temp_dir, 'framework')
+  vendor_target_files_temp_dir = os.path.join(temp_dir, 'vendor')
   output_target_files_temp_dir = os.path.join(temp_dir, 'output')
 
-  # Extract "as is" items from the input system partial target files package.
+  # Extract "as is" items from the input framework partial target files package.
   # We extract them directly into the output temporary directory since the
   # items do not need special case processing.
 
   extract_items(
-      target_files=system_target_files,
+      target_files=framework_target_files,
       target_files_temp_dir=output_target_files_temp_dir,
-      extract_item_list=system_item_list)
+      extract_item_list=framework_item_list)
 
-  # Extract "as is" items from the input other partial target files package. We
+  # Extract "as is" items from the input vendor partial target files package. We
   # extract them directly into the output temporary directory since the items
   # do not need special case processing.
 
   extract_items(
-      target_files=other_target_files,
+      target_files=vendor_target_files,
       target_files_temp_dir=output_target_files_temp_dir,
-      extract_item_list=other_item_list)
+      extract_item_list=vendor_item_list)
 
-  # Extract "special" items from the input system partial target files package.
+  # Extract "special" items from the input framework partial target files
+  # package. We extract these items to a different directory since they require
+  # special processing before they will end up in the output directory.
+
+  extract_items(
+      target_files=framework_target_files,
+      target_files_temp_dir=framework_target_files_temp_dir,
+      extract_item_list=FRAMEWORK_EXTRACT_SPECIAL_ITEM_LIST)
+
+  # Extract "special" items from the input vendor partial target files package.
   # We extract these items to a different directory since they require special
   # processing before they will end up in the output directory.
 
   extract_items(
-      target_files=system_target_files,
-      target_files_temp_dir=system_target_files_temp_dir,
-      extract_item_list=SYSTEM_EXTRACT_SPECIAL_ITEM_LIST)
-
-  # Extract "special" items from the input other partial target files package.
-  # We extract these items to different directory since they require special
-  # processing before they will end up in the output directory.
-
-  extract_items(
-      target_files=other_target_files,
-      target_files_temp_dir=other_target_files_temp_dir,
-      extract_item_list=OTHER_EXTRACT_SPECIAL_ITEM_LIST)
+      target_files=vendor_target_files,
+      target_files_temp_dir=vendor_target_files_temp_dir,
+      extract_item_list=VENDOR_EXTRACT_SPECIAL_ITEM_LIST)
 
   # Now that the temporary directories contain all the extracted files, perform
   # special case processing on any items that need it. After this function
@@ -824,10 +830,10 @@
   # files package are in place.
 
   process_special_cases(
-      system_target_files_temp_dir=system_target_files_temp_dir,
-      other_target_files_temp_dir=other_target_files_temp_dir,
+      framework_target_files_temp_dir=framework_target_files_temp_dir,
+      vendor_target_files_temp_dir=vendor_target_files_temp_dir,
       output_target_files_temp_dir=output_target_files_temp_dir,
-      system_misc_info_keys=system_misc_info_keys,
+      framework_misc_info_keys=framework_misc_info_keys,
       rebuild_recovery=rebuild_recovery)
 
   # Regenerate IMAGES in the temporary directory.
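
For orientation, the extraction steps above populate three scratch directories under temp_dir before the special-case processing runs; a tiny sketch of that layout (the helper name is illustrative):

import os


def sketch_temp_dirs(temp_dir):
  # Per-build directories hold the "special" items; "as is" items from both
  # builds are extracted straight into the output directory.
  return {
      'framework': os.path.join(temp_dir, 'framework'),
      'vendor': os.path.join(temp_dir, 'vendor'),
      'output': os.path.join(temp_dir, 'output'),
  }
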
@@ -925,6 +931,7 @@
   ]
   logger.info('creating %s', output_target_files)
   common.RunAndWait(command, verbose=True)
+  logger.info('finished creating %s', output_target_files)
 
   # Create the OTA package from the merged target files package.
 
@@ -976,15 +983,35 @@
 
   def option_handler(o, a):
     if o == '--system-target-files':
-      OPTIONS.system_target_files = a
+      logger.warning(
+          '--system-target-files has been renamed to --framework-target-files')
+      OPTIONS.framework_target_files = a
+    elif o == '--framework-target-files':
+      OPTIONS.framework_target_files = a
     elif o == '--system-item-list':
-      OPTIONS.system_item_list = a
+      logger.warning(
+          '--system-item-list has been renamed to --framework-item-list')
+      OPTIONS.framework_item_list = a
+    elif o == '--framework-item-list':
+      OPTIONS.framework_item_list = a
     elif o == '--system-misc-info-keys':
-      OPTIONS.system_misc_info_keys = a
+      logger.warning(
+          '--system-misc-info-keys has been renamed to --framework-misc-info-keys'
+      )
+      OPTIONS.framework_misc_info_keys = a
+    elif o == '--framework-misc-info-keys':
+      OPTIONS.framework_misc_info_keys = a
     elif o == '--other-target-files':
-      OPTIONS.other_target_files = a
+      logger.warning(
+          '--other-target-files has been renamed to --vendor-target-files')
+      OPTIONS.vendor_target_files = a
+    elif o == '--vendor-target-files':
+      OPTIONS.vendor_target_files = a
     elif o == '--other-item-list':
-      OPTIONS.other_item_list = a
+      logger.warning('--other-item-list has been renamed to --vendor-item-list')
+      OPTIONS.vendor_item_list = a
+    elif o == '--vendor-item-list':
+      OPTIONS.vendor_item_list = a
     elif o == '--output-target-files':
       OPTIONS.output_target_files = a
     elif o == '--output-dir':
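
To summarize the option handling above: each deprecated flag is still accepted, logs a warning, and maps onto its renamed counterpart, roughly:

# Deprecated flag -> replacement, as handled by option_handler above.
DEPRECATED_FLAG_RENAMES = {
    '--system-target-files': '--framework-target-files',
    '--system-item-list': '--framework-item-list',
    '--system-misc-info-keys': '--framework-misc-info-keys',
    '--other-target-files': '--vendor-target-files',
    '--other-item-list': '--vendor-item-list',
}
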
@@ -1010,10 +1037,15 @@
       __doc__,
       extra_long_opts=[
           'system-target-files=',
+          'framework-target-files=',
           'system-item-list=',
+          'framework-item-list=',
           'system-misc-info-keys=',
+          'framework-misc-info-keys=',
           'other-target-files=',
+          'vendor-target-files=',
           'other-item-list=',
+          'vendor-item-list=',
           'output-target-files=',
           'output-dir=',
           'output-item-list=',
@@ -1025,27 +1057,28 @@
       ],
       extra_option_handler=option_handler)
 
-  if (args or OPTIONS.system_target_files is None or
-      OPTIONS.other_target_files is None or
+  if (args or OPTIONS.framework_target_files is None or
+      OPTIONS.vendor_target_files is None or
       (OPTIONS.output_target_files is None and OPTIONS.output_dir is None) or
       (OPTIONS.output_dir is not None and OPTIONS.output_item_list is None)):
     common.Usage(__doc__)
     sys.exit(1)
 
-  if OPTIONS.system_item_list:
-    system_item_list = read_config_list(OPTIONS.system_item_list)
+  if OPTIONS.framework_item_list:
+    framework_item_list = read_config_list(OPTIONS.framework_item_list)
   else:
-    system_item_list = DEFAULT_SYSTEM_ITEM_LIST
+    framework_item_list = DEFAULT_FRAMEWORK_ITEM_LIST
 
-  if OPTIONS.system_misc_info_keys:
-    system_misc_info_keys = read_config_list(OPTIONS.system_misc_info_keys)
+  if OPTIONS.framework_misc_info_keys:
+    framework_misc_info_keys = read_config_list(
+        OPTIONS.framework_misc_info_keys)
   else:
-    system_misc_info_keys = DEFAULT_SYSTEM_MISC_INFO_KEYS
+    framework_misc_info_keys = DEFAULT_FRAMEWORK_MISC_INFO_KEYS
 
-  if OPTIONS.other_item_list:
-    other_item_list = read_config_list(OPTIONS.other_item_list)
+  if OPTIONS.vendor_item_list:
+    vendor_item_list = read_config_list(OPTIONS.vendor_item_list)
   else:
-    other_item_list = DEFAULT_OTHER_ITEM_LIST
+    vendor_item_list = DEFAULT_VENDOR_ITEM_LIST
 
   if OPTIONS.output_item_list:
     output_item_list = read_config_list(OPTIONS.output_item_list)
@@ -1053,19 +1086,19 @@
     output_item_list = None
 
   if not validate_config_lists(
-      system_item_list=system_item_list,
-      system_misc_info_keys=system_misc_info_keys,
-      other_item_list=other_item_list):
+      framework_item_list=framework_item_list,
+      framework_misc_info_keys=framework_misc_info_keys,
+      vendor_item_list=vendor_item_list):
     sys.exit(1)
 
   call_func_with_temp_dir(
       lambda temp_dir: merge_target_files(
           temp_dir=temp_dir,
-          system_target_files=OPTIONS.system_target_files,
-          system_item_list=system_item_list,
-          system_misc_info_keys=system_misc_info_keys,
-          other_target_files=OPTIONS.other_target_files,
-          other_item_list=other_item_list,
+          framework_target_files=OPTIONS.framework_target_files,
+          framework_item_list=framework_item_list,
+          framework_misc_info_keys=framework_misc_info_keys,
+          vendor_target_files=OPTIONS.vendor_target_files,
+          vendor_item_list=vendor_item_list,
           output_target_files=OPTIONS.output_target_files,
           output_dir=OPTIONS.output_dir,
           output_item_list=output_item_list,
diff --git a/tools/releasetools/test_merge_target_files.py b/tools/releasetools/test_merge_target_files.py
index 978f679..0a636bb 100644
--- a/tools/releasetools/test_merge_target_files.py
+++ b/tools/releasetools/test_merge_target_files.py
@@ -19,9 +19,9 @@
 import common
 import test_utils
 from merge_target_files import (read_config_list, validate_config_lists,
-                                DEFAULT_SYSTEM_ITEM_LIST,
-                                DEFAULT_OTHER_ITEM_LIST,
-                                DEFAULT_SYSTEM_MISC_INFO_KEYS, copy_items,
+                                DEFAULT_FRAMEWORK_ITEM_LIST,
+                                DEFAULT_VENDOR_ITEM_LIST,
+                                DEFAULT_FRAMEWORK_MISC_INFO_KEYS, copy_items,
                                 merge_dynamic_partition_info_dicts,
                                 process_apex_keys_apk_certs_common)
 
@@ -84,10 +84,10 @@
         os.readlink(os.path.join(output_dir, 'a_link.cpp')), 'a.cpp')
 
   def test_read_config_list(self):
-    system_item_list_file = os.path.join(self.testdata_dir,
-                                         'merge_config_system_item_list')
-    system_item_list = read_config_list(system_item_list_file)
-    expected_system_item_list = [
+    framework_item_list_file = os.path.join(self.testdata_dir,
+                                            'merge_config_framework_item_list')
+    framework_item_list = read_config_list(framework_item_list_file)
+    expected_framework_item_list = [
         'META/apkcerts.txt',
         'META/filesystem_config.txt',
         'META/root_filesystem_config.txt',
@@ -98,53 +98,58 @@
         'ROOT/*',
         'SYSTEM/*',
     ]
-    self.assertItemsEqual(system_item_list, expected_system_item_list)
+    self.assertItemsEqual(framework_item_list, expected_framework_item_list)
 
   def test_validate_config_lists_ReturnsFalseIfMissingDefaultItem(self):
-    system_item_list = list(DEFAULT_SYSTEM_ITEM_LIST)
-    system_item_list.remove('SYSTEM/*')
+    framework_item_list = list(DEFAULT_FRAMEWORK_ITEM_LIST)
+    framework_item_list.remove('SYSTEM/*')
     self.assertFalse(
-        validate_config_lists(system_item_list, DEFAULT_SYSTEM_MISC_INFO_KEYS,
-                              DEFAULT_OTHER_ITEM_LIST))
+        validate_config_lists(framework_item_list,
+                              DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
+                              DEFAULT_VENDOR_ITEM_LIST))
 
   def test_validate_config_lists_ReturnsTrueIfDefaultItemInDifferentList(self):
-    system_item_list = list(DEFAULT_SYSTEM_ITEM_LIST)
-    system_item_list.remove('ROOT/*')
-    other_item_list = list(DEFAULT_OTHER_ITEM_LIST)
-    other_item_list.append('ROOT/*')
+    framework_item_list = list(DEFAULT_FRAMEWORK_ITEM_LIST)
+    framework_item_list.remove('ROOT/*')
+    vendor_item_list = list(DEFAULT_VENDOR_ITEM_LIST)
+    vendor_item_list.append('ROOT/*')
     self.assertTrue(
-        validate_config_lists(system_item_list, DEFAULT_SYSTEM_MISC_INFO_KEYS,
-                              other_item_list))
+        validate_config_lists(framework_item_list,
+                              DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
+                              vendor_item_list))
 
   def test_validate_config_lists_ReturnsTrueIfExtraItem(self):
-    system_item_list = list(DEFAULT_SYSTEM_ITEM_LIST)
-    system_item_list.append('MY_NEW_PARTITION/*')
+    framework_item_list = list(DEFAULT_FRAMEWORK_ITEM_LIST)
+    framework_item_list.append('MY_NEW_PARTITION/*')
     self.assertTrue(
-        validate_config_lists(system_item_list, DEFAULT_SYSTEM_MISC_INFO_KEYS,
-                              DEFAULT_OTHER_ITEM_LIST))
+        validate_config_lists(framework_item_list,
+                              DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
+                              DEFAULT_VENDOR_ITEM_LIST))
 
   def test_validate_config_lists_ReturnsFalseIfSharedExtractedPartition(self):
-    other_item_list = list(DEFAULT_OTHER_ITEM_LIST)
-    other_item_list.append('SYSTEM/my_system_file')
+    vendor_item_list = list(DEFAULT_VENDOR_ITEM_LIST)
+    vendor_item_list.append('SYSTEM/my_system_file')
     self.assertFalse(
-        validate_config_lists(DEFAULT_SYSTEM_ITEM_LIST,
-                              DEFAULT_SYSTEM_MISC_INFO_KEYS, other_item_list))
+        validate_config_lists(DEFAULT_FRAMEWORK_ITEM_LIST,
+                              DEFAULT_FRAMEWORK_MISC_INFO_KEYS,
+                              vendor_item_list))
 
   def test_validate_config_lists_ReturnsFalseIfBadSystemMiscInfoKeys(self):
     for bad_key in ['dynamic_partition_list', 'super_partition_groups']:
-      system_misc_info_keys = list(DEFAULT_SYSTEM_MISC_INFO_KEYS)
-      system_misc_info_keys.append(bad_key)
+      framework_misc_info_keys = list(DEFAULT_FRAMEWORK_MISC_INFO_KEYS)
+      framework_misc_info_keys.append(bad_key)
       self.assertFalse(
-          validate_config_lists(DEFAULT_SYSTEM_ITEM_LIST, system_misc_info_keys,
-                                DEFAULT_OTHER_ITEM_LIST))
+          validate_config_lists(DEFAULT_FRAMEWORK_ITEM_LIST,
+                                framework_misc_info_keys,
+                                DEFAULT_VENDOR_ITEM_LIST))
 
   def test_merge_dynamic_partition_info_dicts_ReturnsMergedDict(self):
-    system_dict = {
+    framework_dict = {
         'super_partition_groups': 'group_a',
         'dynamic_partition_list': 'system',
         'super_group_a_list': 'system',
     }
-    other_dict = {
+    vendor_dict = {
         'super_partition_groups': 'group_a group_b',
         'dynamic_partition_list': 'vendor product',
         'super_group_a_list': 'vendor',
@@ -153,8 +158,8 @@
         'super_group_b_size': '2000',
     }
     merged_dict = merge_dynamic_partition_info_dicts(
-        system_dict=system_dict,
-        other_dict=other_dict,
+        framework_dict=framework_dict,
+        vendor_dict=vendor_dict,
         size_prefix='super_',
         size_suffix='_size',
         list_prefix='super_',
@@ -173,19 +178,19 @@
     output_dir = common.MakeTempDir()
     os.makedirs(os.path.join(output_dir, 'META'))
 
-    system_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(system_dir, 'META'))
+    framework_dir = common.MakeTempDir()
+    os.makedirs(os.path.join(framework_dir, 'META'))
     os.symlink(
-        os.path.join(self.testdata_dir, 'apexkeys_system.txt'),
-        os.path.join(system_dir, 'META', 'apexkeys.txt'))
+        os.path.join(self.testdata_dir, 'apexkeys_framework.txt'),
+        os.path.join(framework_dir, 'META', 'apexkeys.txt'))
 
-    other_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(other_dir, 'META'))
+    vendor_dir = common.MakeTempDir()
+    os.makedirs(os.path.join(vendor_dir, 'META'))
     os.symlink(
-        os.path.join(self.testdata_dir, 'apexkeys_other.txt'),
-        os.path.join(other_dir, 'META', 'apexkeys.txt'))
+        os.path.join(self.testdata_dir, 'apexkeys_vendor.txt'),
+        os.path.join(vendor_dir, 'META', 'apexkeys.txt'))
 
-    process_apex_keys_apk_certs_common(system_dir, other_dir, output_dir,
+    process_apex_keys_apk_certs_common(framework_dir, vendor_dir, output_dir,
                                        'apexkeys.txt')
 
     merged_entries = []
@@ -206,17 +211,17 @@
     output_dir = common.MakeTempDir()
     os.makedirs(os.path.join(output_dir, 'META'))
 
-    system_dir = common.MakeTempDir()
-    os.makedirs(os.path.join(system_dir, 'META'))
+    framework_dir = common.MakeTempDir()
+    os.makedirs(os.path.join(framework_dir, 'META'))
     os.symlink(
-        os.path.join(self.testdata_dir, 'apexkeys_system.txt'),
-        os.path.join(system_dir, 'META', 'apexkeys.txt'))
+        os.path.join(self.testdata_dir, 'apexkeys_framework.txt'),
+        os.path.join(framework_dir, 'META', 'apexkeys.txt'))
 
     conflict_dir = common.MakeTempDir()
     os.makedirs(os.path.join(conflict_dir, 'META'))
     os.symlink(
-        os.path.join(self.testdata_dir, 'apexkeys_system_conflict.txt'),
+        os.path.join(self.testdata_dir, 'apexkeys_framework_conflict.txt'),
         os.path.join(conflict_dir, 'META', 'apexkeys.txt'))
 
     self.assertRaises(ValueError, process_apex_keys_apk_certs_common,
-                      system_dir, conflict_dir, output_dir, 'apexkeys.txt')
+                      framework_dir, conflict_dir, output_dir, 'apexkeys.txt')
diff --git a/tools/releasetools/testdata/apexkeys_system.txt b/tools/releasetools/testdata/apexkeys_framework.txt
similarity index 100%
rename from tools/releasetools/testdata/apexkeys_system.txt
rename to tools/releasetools/testdata/apexkeys_framework.txt
diff --git a/tools/releasetools/testdata/apexkeys_system_conflict.txt b/tools/releasetools/testdata/apexkeys_framework_conflict.txt
similarity index 100%
rename from tools/releasetools/testdata/apexkeys_system_conflict.txt
rename to tools/releasetools/testdata/apexkeys_framework_conflict.txt
diff --git a/tools/releasetools/testdata/apexkeys_other.txt b/tools/releasetools/testdata/apexkeys_vendor.txt
similarity index 100%
rename from tools/releasetools/testdata/apexkeys_other.txt
rename to tools/releasetools/testdata/apexkeys_vendor.txt
diff --git a/tools/releasetools/testdata/merge_config_system_item_list b/tools/releasetools/testdata/merge_config_framework_item_list
similarity index 100%
rename from tools/releasetools/testdata/merge_config_system_item_list
rename to tools/releasetools/testdata/merge_config_framework_item_list
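
The renamed testdata files above are what the updated tests reference; a quick sanity check, assuming it is run from the repository root, that the renames are in place:

import os

TESTDATA_DIR = 'tools/releasetools/testdata'
RENAMED_FILES = (
    'apexkeys_framework.txt',
    'apexkeys_framework_conflict.txt',
    'apexkeys_vendor.txt',
    'merge_config_framework_item_list',
)
for name in RENAMED_FILES:
  # Each renamed file should now exist under the testdata directory.
  assert os.path.exists(os.path.join(TESTDATA_DIR, name)), name
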
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 1c856a8..37d5d27 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -327,11 +327,14 @@
     cmd = ['avbtool', 'verify_image', '--image', image, '--key', key]
 
     # Append the args for chained partitions if any.
-    for partition in common.AVB_PARTITIONS:
+    for partition in common.AVB_PARTITIONS + common.AVB_VBMETA_PARTITIONS:
       key_name = 'avb_' + partition + '_key_path'
       if info_dict.get(key_name) is not None:
+        # Use the key file from the command line if specified; otherwise fall
+        # back to the one in the info dict.
+        key_file = options.get(key_name, info_dict[key_name])
         chained_partition_arg = common.GetAvbChainedPartitionArg(
-            partition, info_dict, options[key_name])
+            partition, info_dict, key_file)
         cmd.extend(["--expected_chain_partition", chained_partition_arg])
 
     proc = common.Run(cmd)
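
The validate_target_files.py change above prefers a key path passed on the command line and only falls back to the path recorded in the info dict; a small sketch of just that selection, assuming options is the parsed command-line option dict:

def sketch_chained_partition_key(partition, info_dict, options):
  key_name = 'avb_' + partition + '_key_path'
  if info_dict.get(key_name) is None:
    return None  # Not a chained partition; no --expected_chain_partition arg.
  # A key given on the command line wins; otherwise use the info dict's path.
  return options.get(key_name, info_dict[key_name])
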
@@ -357,7 +360,7 @@
       help='the verity public key to verify the bootable images (Verified '
            'Boot 1.0), or the vbmeta image (Verified Boot 2.0, aka AVB), where '
            'applicable')
-  for partition in common.AVB_PARTITIONS:
+  for partition in common.AVB_PARTITIONS + common.AVB_VBMETA_PARTITIONS:
     parser.add_argument(
         '--avb_' + partition + '_key_path',
         help='the public or private key in PEM format to verify AVB chained '