Merge "Enable cfi for 32bit arch"
diff --git a/Android.bp b/Android.bp
new file mode 100644
index 0000000..ab2564e
--- /dev/null
+++ b/Android.bp
@@ -0,0 +1,49 @@
+//
+// Copyright (C) 2021 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["build_make_license"],
+}
+
+// Added automatically by a large-scale-change that took the approach of
+// 'apply every license found to every target'. While this makes sure we respect
+// every license restriction, it may not be entirely correct.
+//
+// e.g. GPL in an MIT project might only apply to the contrib/ directory.
+//
+// Please consider splitting the single license below into multiple licenses,
+// taking care not to lose any license_kind information, and overriding the
+// default license using the 'licenses: [...]' property on targets as needed.
+//
+// For unused files, consider creating a 'filegroup' with "//visibility:private"
+// to attach the license to, and including a comment on whether the files may be
+// used in the current project.
+// See: http://go/android-license-faq
+license {
+    name: "build_make_license",
+    visibility: [":__subpackages__"],
+    license_kinds: [
+        "SPDX-license-identifier-Apache-2.0",
+        "SPDX-license-identifier-BSD",
+        "SPDX-license-identifier-CC-BY",
+        "SPDX-license-identifier-GPL",
+        "SPDX-license-identifier-GPL-2.0",
+        "SPDX-license-identifier-LGPL",
+        "SPDX-license-identifier-MIT",
+        "legacy_not_a_contribution",
+        "legacy_restricted",
+    ],
+    // large-scale-change unable to identify any license_text files
+}
diff --git a/Changes.md b/Changes.md
index 84c8d95..0a6adc4 100644
--- a/Changes.md
+++ b/Changes.md
@@ -17,9 +17,9 @@
 System properties for each of the partition is supposed to be set via following
 product config variables.
 
-For system partititon,
+For system partition,
 
-* `PRODUCT_SYSTEM_PROPERITES`
+* `PRODUCT_SYSTEM_PROPERTIES`
 * `PRODUCT_SYSTEM_DEFAULT_PROPERTIES` is highly discouraged. Will be deprecated.
 
 For vendor partition,
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 41defb2..3beadff 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -754,6 +754,8 @@
 # Workaround for Soong not being able to rebuild the host binary if its
 # JNI dependencies change: b/170389375
 $(call add-clean-step, rm -rf $(OUT_DIR)/soong/host/*/lib*/libconscrypt_openjdk_jni.so)
+# vendor-ramdisk renamed to vendor_ramdisk
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/vendor-ramdisk)
 
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
diff --git a/METADATA b/METADATA
new file mode 100644
index 0000000..814cb00
--- /dev/null
+++ b/METADATA
@@ -0,0 +1,8 @@
+third_party {
+  # would be NOTICE save for GPL in:
+  #   core/LINUX_KERNEL_COPYING
+  #   tools/droiddoc/templates-pdk/assets/jquery-1.6.2.min.js
+  #   tools/droiddoc/templates-pdk/assets/jquery-history.js
+  #   tools/droiddoc/templates-pdk/assets/jquery-resizable.min.js
+  license_type: RESTRICTED
+}
diff --git a/PREUPLOAD.cfg b/PREUPLOAD.cfg
new file mode 100644
index 0000000..ce75150
--- /dev/null
+++ b/PREUPLOAD.cfg
@@ -0,0 +1,2 @@
+[Hook Scripts]
+do_not_use_DO_NOT_MERGE = ${REPO_ROOT}/build/soong/scripts/check_do_not_merge.sh ${PREUPLOAD_COMMIT}
diff --git a/common/json.mk b/common/json.mk
index ba8ffa7..e376aab 100644
--- a/common/json.mk
+++ b/common/json.mk
@@ -24,7 +24,10 @@
 add_json_csv =$= $(call add_json_val,$(1),$(call csv_to_json_list,$(strip $(2))))
 add_json_bool =$= $(call add_json_val,$(1),$(if $(strip $(2)),true,false))
 add_json_map =$= $(eval _json_contents := $$(_json_contents)$$(_json_indent)"$$(strip $$(1))": {$$(newline))$(json_increase_indent)
+add_json_map_anon =$= $(eval _json_contents := $$(_json_contents)$$(_json_indent){$$(newline))$(json_increase_indent)
 end_json_map =$= $(json_decrease_indent)$(eval _json_contents := $$(_json_contents)$$(if $$(filter %$$(comma),$$(lastword $$(_json_contents))),__SV_END)$$(_json_indent)},$$(newline))
+add_json_array =$= $(eval _json_contents := $$(_json_contents)$$(_json_indent)"$$(strip $$(1))": [$$(newline))$(json_increase_indent)
+end_json_array =$= $(json_decrease_indent)$(eval _json_contents := $$(_json_contents)$$(if $$(filter %$$(comma),$$(lastword $$(_json_contents))),__SV_END)$$(_json_indent)],$$(newline))
 
 # Clears _json_contents to start a new json file
 json_start =$= $(eval _json_contents := {$$(newline))$(eval _json_indent := $$(4space))
diff --git a/core/Makefile b/core/Makefile
index 40e9274..0d993d0 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -230,7 +230,7 @@
 # $(7): module archive
 # $(8): staging dir for stripped modules
 # $(9): module directory name
-# Returns the a list of src:dest pairs to install the modules using copy-many-files.
+# Returns a list of src:dest pairs to install the modules using copy-many-files.
 define build-image-kernel-modules
   $(if $(9), \
     $(eval _dir := $(9)/), \
@@ -315,6 +315,26 @@
 	@echo '$$(strip $$(notdir $$(PRIVATE_LOAD_MODULES)))' | tr ' ' '\n' > $$(@)
 endef
 
+# $(1): source blocklist file
+# $(2): destination pathname
+# Returns a build rule that checks the syntax of and installs a kernel modules
+# blocklist file. Strips and squeezes any extra whitespace in the blocklist.
+# For use via $(eval).
+define build-image-kernel-modules-blocklist-file
+$(2): $(1)
+	@echo "modprobe blocklist $$(@)"
+	$(hide) mkdir -p "$$(dir $$@)"
+	$(hide) rm -f "$$@"
+	$(hide) awk <"$$<" >"$$@" \
+	  '/^#/ { print; next } \
+	   NF == 0 { next } \
+	   NF != 2 || $$$$1 != "blocklist" \
+	     { print "Invalid blocklist line " FNR ": " $$$$0 >"/dev/stderr"; \
+	       exit_status = 1; next } \
+	   { $$$$1 = $$$$1; print } \
+	   END { exit exit_status }'
+endef
+
 # $(1): image name
 # $(2): build output directory (TARGET_OUT_VENDOR, TARGET_RECOVERY_ROOT_OUT, etc)
 # $(3): mount point
@@ -331,7 +351,12 @@
 $(if $(strip $(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver))$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver))),\
   $(if $(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),,\
     $(eval BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver) := $(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)))) \
-  $(call copy-many-files,$(call build-image-kernel-modules,$(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)),$(2),$(3),$(call intermediates-dir-for,PACKAGING,depmod_$(1)$(_sep)$(_kver)),$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),$(4),$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver)),$(_stripped_staging_dir),$(_kver))))
+  $(call copy-many-files,$(call build-image-kernel-modules,$(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)),$(2),$(3),$(call intermediates-dir-for,PACKAGING,depmod_$(1)$(_sep)$(_kver)),$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),$(4),$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver)),$(_stripped_staging_dir),$(_kver)))) \
+$(if $(BOARD_$(1)_KERNEL_MODULES_BLOCKLIST_FILE$(_sep)$(_kver)), \
+  $(eval $(call build-image-kernel-modules-blocklist-file, \
+    $(BOARD_$(1)_KERNEL_MODULES_BLOCKLIST_FILE$(_sep)$(_kver)), \
+    $(2)/lib/modules/modules.blocklist)) \
+  $(2)/lib/modules/modules.blocklist)
 endef
 
 # $(1): kernel module directory name (top is an out of band value for no directory)
@@ -669,10 +694,10 @@
 $(INSTALLED_FILES_FILE_ROOT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ROOT)
 $(INSTALLED_FILES_FILE_ROOT) : $(INTERNAL_ROOT_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_ROOT_OUT) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	mkdir -p $(dir $@)
+	rm -f $@
+	$(FILESLIST) $(TARGET_ROOT_OUT) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 $(call dist-for-goals, sdk win_sdk sdk_addon, $(INSTALLED_FILES_FILE_ROOT))
 
@@ -698,11 +723,11 @@
 $(INSTALLED_FILES_FILE_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_RAMDISK)
 $(INSTALLED_FILES_FILE_RAMDISK) : $(INTERNAL_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
-	@mkdir -p $(TARGET_RAMDISK_OUT)
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_RAMDISK_OUT) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	mkdir -p $(TARGET_RAMDISK_OUT)
+	mkdir -p $(dir $@)
+	rm -f $@
+	$(FILESLIST) $(TARGET_RAMDISK_OUT) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 $(call dist-for-goals, sdk win_sdk sdk_addon, $(INSTALLED_FILES_FILE_RAMDISK))
 BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img
@@ -721,7 +746,7 @@
 # We just build this directly to the install location.
 INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET)
 $(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_RAMDISK_FILES) $(INSTALLED_FILES_FILE_RAMDISK) | $(COMPRESSION_COMMAND_DEPS)
-	$(call pretty,"Target ram disk: $@")
+	$(call pretty,"Target ramdisk: $@")
 	$(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RAMDISK_OUT) | $(COMPRESSION_COMMAND) > $@
 
 .PHONY: ramdisk-nodeps
@@ -775,7 +800,7 @@
 endif
 
 # $1: boot image file name
-# $2: boot image variant (boot, boot-debug)
+# $2: boot image variant (boot, boot-debug, boot-test-harness)
 define get-bootimage-partition-size
   $(BOARD_$(call to-upper,$(subst .img,,$(subst $(2),kernel,$(notdir $(1)))))_BOOTIMAGE_PARTITION_SIZE)
 endef
@@ -955,7 +980,7 @@
     $(ALL_GENERATED_SOURCES) \
     $(ALL_DEFAULT_INSTALLED_MODULES))
 
-INTERNAL_VENDOR_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor-boot)/vendor-ramdisk.cpio$(RAMDISK_EXT)
+INTERNAL_VENDOR_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor_boot)/vendor_ramdisk.cpio$(RAMDISK_EXT)
 
 ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
 $(INTERNAL_VENDOR_RAMDISK_TARGET): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
@@ -965,16 +990,23 @@
 $(INTERNAL_VENDOR_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_VENDOR_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
 	$(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_VENDOR_RAMDISK_OUT) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $@
 
+ifeq (true,$(BOARD_BUILD_VENDOR_RAMDISK_IMAGE))
+INSTALLED_VENDOR_RAMDISK_TARGET := $(PRODUCT_OUT)/vendor_ramdisk.img
+$(INSTALLED_VENDOR_RAMDISK_TARGET): $(INTERNAL_VENDOR_RAMDISK_TARGET)
+	$(call pretty,"Target vendor ramdisk: $@")
+	$(copy-file-to-target)
+endif
+
 INSTALLED_FILES_FILE_VENDOR_RAMDISK := $(PRODUCT_OUT)/installed-files-vendor-ramdisk.txt
 INSTALLED_FILES_JSON_VENDOR_RAMDISK := $(INSTALLED_FILES_FILE_VENDOR_RAMDISK:.txt=.json)
 $(INSTALLED_FILES_FILE_VENDOR_RAMDISK): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR_RAMDISK)
 $(INSTALLED_FILES_FILE_VENDOR_RAMDISK): $(INTERNAL_VENDOR_RAMDISK_TARGET)
 $(INSTALLED_FILES_FILE_VENDOR_RAMDISK): $(INTERNAL_VENDOR_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
-	echo Installed file list: $@
+	@echo Installed file list: $@
 	mkdir -p $(dir $@)
 	rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_VENDOR_RAMDISK_OUT) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	$(FILESLIST) $(TARGET_VENDOR_RAMDISK_OUT) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
   INTERNAL_VENDOR_BOOTIMAGE_ARGS += --dtb $(INSTALLED_DTBIMAGE_TARGET)
@@ -989,6 +1021,20 @@
   INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_cmdline "$(INTERNAL_KERNEL_CMDLINE)"
 endif
 
+ifdef INTERNAL_BOOTCONFIG
+ifneq (,$(findstring androidboot.hardware=, $(INTERNAL_BOOTCONFIG)))
+$(error "androidboot.hardware" BOOTCONFIG parameter is not supported due to \
+  bootconfig limitations. Use "hardware" instead. INTERNAL_BOOTCONFIG: \
+  $(INTERNAL_BOOTCONFIG))
+endif
+INTERNAL_VENDOR_BOOTCONFIG_TARGET := $(PRODUCT_OUT)/vendor-bootconfig.img
+$(INTERNAL_VENDOR_BOOTCONFIG_TARGET):
+	rm -f $@
+	$(foreach param,$(INTERNAL_BOOTCONFIG), \
+	 printf "%s\n" $(param) >> $@;)
+  INTERNAL_VENDOR_BOOTIMAGE_ARGS += --vendor_bootconfig $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
+endif
+
 # $(1): Build target name
 # $(2): Staging dir to be compressed
 # $(3): Build dependencies
@@ -1027,13 +1073,16 @@
     $(eval vendor_ramdisk_fragment_target := $(call build-vendor-ramdisk-fragment,$(vendor_ramdisk_fragment))) \
     $(if $(filter --ramdisk_type,$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS)),, \
       $(eval BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS += --ramdisk_type DLKM))) \
+  $(if $(filter --ramdisk_name,$(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS)), \
+    $(error Must not specify --ramdisk_name for vendor ramdisk fragment: $(vendor_ramdisk_fragment))) \
+  $(eval BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS += --ramdisk_name $(vendor_ramdisk_fragment)) \
   $(eval INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS += $(vendor_ramdisk_fragment_target)) \
   $(eval INTERNAL_VENDOR_RAMDISK_FRAGMENT_ARGS += $(BOARD_VENDOR_RAMDISK_FRAGMENT.$(vendor_ramdisk_fragment).MKBOOTIMG_ARGS) --vendor_ramdisk_fragment $(vendor_ramdisk_fragment_target)) \
 )
 
 INSTALLED_VENDOR_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/vendor_boot.img
 $(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DTBIMAGE_TARGET)
-$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
+$(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS) $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
 ifeq ($(BOARD_AVB_ENABLE),true)
 $(INSTALLED_VENDOR_BOOTIMAGE_TARGET): $(AVBTOOL) $(BOARD_AVB_VENDOR_BOOTIMAGE_KEY_PATH)
 	$(call pretty,"Target vendor_boot image: $@")
@@ -1220,7 +1269,7 @@
 license_modules_rehomed += $(filter $(PRODUCT_OUT)/data/%,$(license_modules_rest))
 license_modules_rehomed += $(filter $(PRODUCT_OUT)/ramdisk/%,$(license_modules_rest))
 license_modules_rehomed += $(filter $(PRODUCT_OUT)/debug_ramdisk/%,$(license_modules_rest))
-license_modules_rehomed += $(filter $(PRODUCT_OUT)/vendor-ramdisk/%,$(license_modules_rest))
+license_modules_rehomed += $(filter $(PRODUCT_OUT)/vendor_ramdisk/%,$(license_modules_rest))
 license_modules_rehomed += $(filter $(PRODUCT_OUT)/persist/%,$(license_modules_rest))
 license_modules_rehomed += $(filter $(PRODUCT_OUT)/persist.img,$(license_modules_rest))
 license_modules_rehomed += $(filter $(PRODUCT_OUT)/system_other/%,$(license_modules_rest))
@@ -1755,10 +1804,10 @@
 $(INSTALLED_FILES_FILE_RECOVERY): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_RECOVERY)
 $(INSTALLED_FILES_FILE_RECOVERY): $(INTERNAL_RECOVERYIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_RECOVERY_ROOT_OUT) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	mkdir -p $(dir $@)
+	rm -f $@
+	$(FILESLIST) $(TARGET_RECOVERY_ROOT_OUT) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 recovery_sepolicy := \
     $(TARGET_RECOVERY_ROOT_OUT)/sepolicy \
@@ -2212,7 +2261,7 @@
 # Need to depend on the built ramdisk-debug.img, to get a complete list of the installed files.
 $(INSTALLED_FILES_FILE_DEBUG_RAMDISK) : $(INSTALLED_DEBUG_RAMDISK_TARGET)
 $(INSTALLED_FILES_FILE_DEBUG_RAMDISK) : $(INTERNAL_DEBUG_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
-	echo Installed file list: $@
+	@echo Installed file list: $@
 	mkdir -p $(dir $@)
 	rm -f $@
 	$(FILESLIST) $(DEBUG_RAMDISK_ROOT_DIR) > $(@:.txt=.json)
@@ -2237,7 +2286,7 @@
 $(INSTALLED_DEBUG_RAMDISK_TARGET): $(INSTALLED_RAMDISK_TARGET)
 endif # BOARD_USES_RECOVERY_AS_BOOT
 $(INSTALLED_DEBUG_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_DEBUG_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
-	$(call pretty,"Target debug ram disk: $@")
+	$(call pretty,"Target debug ramdisk: $@")
 	mkdir -p $(TARGET_DEBUG_RAMDISK_OUT)
 	touch $(TARGET_DEBUG_RAMDISK_OUT)/force_debuggable
 	rsync -a $(DEBUG_RAMDISK_SYNC_DIR)/ $(DEBUG_RAMDISK_ROOT_DIR)
@@ -2290,21 +2339,22 @@
 BOARD_AVB_BOOT_TEST_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
 INTERNAL_AVB_BOOT_TEST_SIGNING_ARGS := --algorithm SHA256_RSA2048 --key $(BOARD_AVB_BOOT_TEST_KEY_PATH)
 # $(1): the bootimage to sign
+# $(2): boot image variant (boot, boot-debug, boot-test-harness)
 define test-key-sign-bootimage
-$(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),boot-debug)))
+$(call assert-max-image-size,$(1),$(call get-hash-image-max-size,$(call get-bootimage-partition-size,$(1),$(2))))
 $(AVBTOOL) add_hash_footer \
   --image $(1) \
-  --partition_size $(call get-bootimage-partition-size,$(1),boot-debug)\
+  --partition_size $(call get-bootimage-partition-size,$(1),$(2))\
   --partition_name boot $(INTERNAL_AVB_BOOT_TEST_SIGNING_ARGS) \
   $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
-$(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),boot-debug))
+$(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),$(2)))
 endef
 
 # $(1): output file
 define build-debug-bootimage-target
   $(MKBOOTIMG) --kernel $(PRODUCT_OUT)/$(subst .img,,$(subst boot-debug,kernel,$(notdir $(1)))) \
     $(INTERNAL_DEBUG_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $1
-  $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$1))
+  $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$1,boot-debug))
 endef
 
 # Depends on original boot.img and ramdisk-debug.img, to build the new boot-debug.img
@@ -2325,9 +2375,13 @@
 # -----------------------------------------------------------------
 # vendor debug ramdisk
 # Combines vendor ramdisk files and debug ramdisk files to build the vendor debug ramdisk.
-INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET := $(PRODUCT_OUT)/vendor-ramdisk-debug.cpio$(RAMDISK_EXT)
-$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_FILES := $(INTERNAL_DEBUG_RAMDISK_FILES)
-$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_RAMDISK_DIR := $(TARGET_VENDOR_RAMDISK_OUT)
+INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor_boot-debug)/vendor_ramdisk-debug.cpio$(RAMDISK_EXT)
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_FILES := $(INTERNAL_DEBUG_RAMDISK_FILES)
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_RAMDISK_DIR := $(TARGET_VENDOR_RAMDISK_OUT)
+
+ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): PRIVATE_ADDITIONAL_DIR := $(TARGET_RECOVERY_ROOT_OUT)
+endif
 
 INTERNAL_VENDOR_DEBUG_RAMDISK_FILES := $(filter $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)/%, \
     $(ALL_GENERATED_SOURCES) \
@@ -2337,16 +2391,22 @@
 # if BOARD_USES_RECOVERY_AS_BOOT is true. Otherwise, it will be $(PRODUCT_OUT)/vendor_debug_ramdisk.
 # But the path of $(VENDOR_DEBUG_RAMDISK_DIR) to build the vendor debug ramdisk, is always
 # $(PRODUCT_OUT)/vendor_debug_ramdisk.
-$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/vendor_debug_ramdisk
-$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
-$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
-	$(call pretty,"Target vendor debug ram disk: $@")
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/debug_ramdisk
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): VENDOR_DEBUG_RAMDISK_DIR := $(PRODUCT_OUT)/vendor_debug_ramdisk
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): $(INTERNAL_VENDOR_RAMDISK_TARGET) $(INSTALLED_DEBUG_RAMDISK_TARGET)
+$(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
 	mkdir -p $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)
 	touch $(TARGET_VENDOR_DEBUG_RAMDISK_OUT)/force_debuggable
 	$(foreach debug_file,$(DEBUG_RAMDISK_FILES), \
-	  cp -f $(debug_file) $(subst $(PRODUCT_OUT)/debug_ramdisk,$(PRODUCT_OUT)/vendor_debug_ramdisk,$(debug_file)) &&) true
-	rsync -a $(VENDOR_RAMDISK_DIR)/ $(VENDOR_DEBUG_RAMDISK_DIR)
-	$(MKBOOTFS) -d $(TARGET_OUT) $(VENDOR_DEBUG_RAMDISK_DIR) | $(COMPRESSION_COMMAND) > $@
+	  cp -f $(debug_file) $(patsubst $(DEBUG_RAMDISK_DIR)/%,$(VENDOR_DEBUG_RAMDISK_DIR)/%,$(debug_file)) &&) true
+	$(MKBOOTFS) -d $(TARGET_OUT) $(VENDOR_RAMDISK_DIR) $(VENDOR_DEBUG_RAMDISK_DIR) $(PRIVATE_ADDITIONAL_DIR) | $(COMPRESSION_COMMAND) > $@
+
+ifeq (true,$(BOARD_BUILD_VENDOR_RAMDISK_IMAGE))
+INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET := $(PRODUCT_OUT)/vendor_ramdisk-debug.img
+$(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET): $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET)
+	$(call pretty,"Target vendor debug ramdisk: $@")
+	$(copy-file-to-target)
+endif
 
 INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK := $(PRODUCT_OUT)/installed-files-vendor-ramdisk-debug.txt
 INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK := $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK:.txt=.json)
@@ -2355,9 +2415,9 @@
 
 # The vendor debug ramdisk will rsync from $(TARGET_VENDOR_RAMDISK_OUT) and $(INTERNAL_DEBUG_RAMDISK_FILES),
 # so we have to wait for the vendor debug ramdisk to be built before generating the installed file list.
-$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET)
+$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET)
 $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK): $(INTERNAL_VENDOR_DEBUG_RAMDISK_FILES) $(FILESLIST) $(FILESLIST_UTIL)
-	echo Installed file list: $@
+	@echo Installed file list: $@
 	mkdir -p $(dir $@)
 	rm -f $@
 	$(FILESLIST) $(VENDOR_DEBUG_RAMDISK_DIR) > $(@:.txt=.json)
@@ -2386,10 +2446,10 @@
 endif
 
 # Depends on vendor_boot.img and vendor-ramdisk-debug.cpio.gz to build the new vendor_boot-debug.img
-$(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_VENDOR_BOOTIMAGE_TARGET) $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET)
+$(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_VENDOR_BOOTIMAGE_TARGET) $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET)
 $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
 	$(call pretty,"Target vendor_boot debug image: $@")
-	$(MKBOOTIMG) $(INTERNAL_VENDOR_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --vendor_ramdisk $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_ARGS) --vendor_boot $@
+	$(MKBOOTIMG) $(INTERNAL_VENDOR_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --vendor_ramdisk $(INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET) $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_ARGS) --vendor_boot $@
 	$(call assert-max-image-size,$@,$(BOARD_VENDOR_BOOTIMAGE_PARTITION_SIZE))
 	$(if $(BOARD_AVB_VENDOR_BOOT_KEY_PATH),$(call test-key-sign-vendor-bootimage,$@))
 
@@ -2426,7 +2486,7 @@
 
 $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): $(INSTALLED_DEBUG_RAMDISK_TARGET)
 $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_TEST_HARNESS_RAMDISK_FILES) | $(COMPRESSION_COMMAND_DEPS)
-	$(call pretty,"Target test harness ram disk: $@")
+	$(call pretty,"Target test harness ramdisk: $@")
 	rsync -a $(TEST_HARNESS_RAMDISK_SYNC_DIR)/ $(TEST_HARNESS_RAMDISK_ROOT_DIR)
 	$(call append-test-harness-props,$(ADDITIONAL_TEST_HARNESS_PROPERTIES),$(TEST_HARNESS_PROP_TARGET))
 	$(MKBOOTFS) -d $(TARGET_OUT) $(TEST_HARNESS_RAMDISK_ROOT_DIR) | $(COMPRESSION_COMMAND) > $@
@@ -2469,7 +2529,7 @@
 define build-boot-test-harness-target
   $(MKBOOTIMG) --kernel $(PRODUCT_OUT)/$(subst .img,,$(subst boot-test-harness,kernel,$(notdir $(1)))) \
     $(INTERNAL_TEST_HARNESS_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
-  $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$@))
+  $(if $(BOARD_AVB_BOOT_KEY_PATH),$(call test-key-sign-bootimage,$@,boot-test-harness))
 endef
 
 # Build the new boot-test-harness.img, based on boot-debug.img and ramdisk-test-harness.img.
@@ -2562,10 +2622,10 @@
 $(INSTALLED_FILES_FILE): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON)
 $(INSTALLED_FILES_FILE): $(FULL_SYSTEMIMAGE_DEPS) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_OUT) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	mkdir -p $(dir $@)
+	rm -f $@
+	$(FILESLIST) $(TARGET_OUT) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 .PHONY: installed-file-list
 installed-file-list: $(INSTALLED_FILES_FILE)
@@ -2806,10 +2866,10 @@
 $(INSTALLED_FILES_FILE_SYSTEMOTHER): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_SYSTEMOTHER)
 $(INSTALLED_FILES_FILE_SYSTEMOTHER) : $(INTERNAL_SYSTEMOTHERIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_OUT_SYSTEM_OTHER) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	mkdir -p $(dir $@)
+	rm -f $@
+	$(FILESLIST) $(TARGET_OUT_SYSTEM_OTHER) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 # Determines partition size for system_other.img.
 ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
@@ -2890,10 +2950,10 @@
 $(INSTALLED_FILES_FILE_VENDOR): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR)
 $(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	mkdir -p $(dir $@)
+	rm -f $@
+	$(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 vendorimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,vendor)
@@ -2942,10 +3002,10 @@
 $(INSTALLED_FILES_FILE_PRODUCT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_PRODUCT)
 $(INSTALLED_FILES_FILE_PRODUCT) : $(INTERNAL_PRODUCTIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_OUT_PRODUCT) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	mkdir -p $(dir $@)
+	rm -f $@
+	$(FILESLIST) $(TARGET_OUT_PRODUCT) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 productimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,product)
@@ -2993,10 +3053,10 @@
 $(INSTALLED_FILES_FILE_SYSTEM_EXT): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_SYSTEM_EXT)
 $(INSTALLED_FILES_FILE_SYSTEM_EXT) : $(INTERNAL_SYSTEM_EXTIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_OUT_SYSTEM_EXT) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	mkdir -p $(dir $@)
+	rm -f $@
+	$(FILESLIST) $(TARGET_OUT_SYSTEM_EXT) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 system_extimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,system_ext)
@@ -3064,10 +3124,10 @@
 $(INSTALLED_FILES_FILE_ODM): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ODM)
 $(INSTALLED_FILES_FILE_ODM) : $(INTERNAL_ODMIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_OUT_ODM) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	mkdir -p $(dir $@)
+	rm -f $@
+	$(FILESLIST) $(TARGET_OUT_ODM) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 odmimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,odm)
@@ -3115,10 +3175,10 @@
 $(INSTALLED_FILES_FILE_VENDOR_DLKM): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR_DLKM)
 $(INSTALLED_FILES_FILE_VENDOR_DLKM) : $(INTERNAL_VENDOR_DLKMIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_OUT_VENDOR_DLKM) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	mkdir -p $(dir $@)
+	rm -f $@
+	$(FILESLIST) $(TARGET_OUT_VENDOR_DLKM) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 vendor_dlkmimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,vendor_dlkm)
@@ -3166,10 +3226,10 @@
 $(INSTALLED_FILES_FILE_ODM_DLKM): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_ODM_DLKM)
 $(INSTALLED_FILES_FILE_ODM_DLKM) : $(INTERNAL_ODM_DLKMIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
-	@mkdir -p $(dir $@)
-	@rm -f $@
-	$(hide) $(FILESLIST) $(TARGET_OUT_ODM_DLKM) > $(@:.txt=.json)
-	$(hide) $(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
+	mkdir -p $(dir $@)
+	rm -f $@
+	$(FILESLIST) $(TARGET_OUT_ODM_DLKM) > $(@:.txt=.json)
+	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 odm_dlkmimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,odm_dlkm)
@@ -3738,6 +3798,18 @@
 
 endif # BOARD_AVB_ENABLE
 
+# List of files from all images
+INTERNAL_ALLIMAGES_FILES := \
+    $(FULL_SYSTEMIMAGE_DEPS) \
+    $(INTERNAL_RAMDISK_FILES) \
+    $(INTERNAL_USERDATAIMAGE_FILES) \
+    $(INTERNAL_VENDORIMAGE_FILES) \
+    $(INTERNAL_PRODUCTIMAGE_FILES) \
+    $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
+    $(INTERNAL_ODMIMAGE_FILES) \
+    $(INTERNAL_VENDOR_DLKMIMAGE_FILES) \
+    $(INTERNAL_ODM_DLKMIMAGE_FILES) \
+
 # -----------------------------------------------------------------
 # Check VINTF of build
 
@@ -3756,13 +3828,7 @@
   $(TARGET_OUT_PRODUCT)/etc/vintf/% \
   $(TARGET_OUT_SYSTEM_EXT)/etc/vintf/% \
 
-check_vintf_common_srcs := $(sort $(filter $(check_vintf_common_srcs_patterns), \
-  $(INTERNAL_SYSTEMIMAGE_FILES) \
-  $(INTERNAL_VENDORIMAGE_FILES) \
-  $(INTERNAL_ODMIMAGE_FILES) \
-  $(INTERNAL_PRODUCTIMAGE_FILES) \
-  $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
-))
+check_vintf_common_srcs := $(sort $(filter $(check_vintf_common_srcs_patterns),$(INTERNAL_ALLIMAGES_FILES)))
 check_vintf_common_srcs_patterns :=
 
 check_vintf_has_system :=
@@ -3780,13 +3846,13 @@
 ifneq ($(check_vintf_system_deps),)
 check_vintf_has_system := true
 
-check_vintf_system_log := $(intermediates)/check_vintf_system_log
+check_vintf_system_log := $(intermediates)/check_vintf_system.log
 check_vintf_all_deps += $(check_vintf_system_log)
 $(check_vintf_system_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_system_deps)
 	@( $< --check-one --dirmap /system:$(TARGET_OUT) > $@ 2>&1 ) || ( cat $@ && exit 1 )
 check_vintf_system_log :=
 
-vintffm_log := $(intermediates)/vintffm_log
+vintffm_log := $(intermediates)/vintffm.log
 check_vintf_all_deps += $(vintffm_log)
 $(vintffm_log): $(HOST_OUT_EXECUTABLES)/vintffm $(check_vintf_system_deps)
 	@( $< --check --dirmap /system:$(TARGET_OUT) \
@@ -3799,7 +3865,7 @@
 check_vintf_vendor_deps := $(filter $(TARGET_OUT_VENDOR)/etc/vintf/%, $(check_vintf_common_srcs))
 ifneq ($(check_vintf_vendor_deps),)
 check_vintf_has_vendor := true
-check_vintf_vendor_log := $(intermediates)/check_vintf_vendor_log
+check_vintf_vendor_log := $(intermediates)/check_vintf_vendor.log
 check_vintf_all_deps += $(check_vintf_vendor_log)
 # Check vendor SKU=(empty) case when:
 # - DEVICE_MANIFEST_FILE is not empty; OR
@@ -3829,7 +3895,7 @@
 ifeq ($(filter true,$(BUILDING_PRODUCT_IMAGE)),$(filter true,$(BOARD_USES_PRODUCTIMAGE)))
 ifeq ($(filter true,$(BUILDING_SYSTEM_EXT_IMAGE)),$(filter true,$(BOARD_USES_SYSTEM_EXTIMAGE)))
 
-check_vintf_compatible_log := $(intermediates)/check_vintf_compatible_log
+check_vintf_compatible_log := $(intermediates)/check_vintf_compatible.log
 check_vintf_all_deps += $(check_vintf_compatible_log)
 
 check_vintf_compatible_args :=
@@ -3977,7 +4043,7 @@
   $(CHECK_PARTITION_SIZES) $(if $(2),--logfile $(2),-v) $(1)
 endef
 
-check_all_partition_sizes_log := $(call intermediates-dir-for,PACKAGING,check-all-partition-sizes)/check_all_partition_sizes_log
+check_all_partition_sizes_log := $(call intermediates-dir-for,PACKAGING,check-all-partition-sizes)/check_all_partition_sizes.log
 droid_targets: $(check_all_partition_sizes_log)
 $(call dist-for-goals, droid_targets, $(check_all_partition_sizes_log))
 
@@ -4097,6 +4163,8 @@
   mksquashfsimage.sh \
   mkuserimg_mke2fs \
   ota_from_target_files \
+  repack_bootimg \
+  secilc \
   sefcontext_compile \
   sgdisk \
   shflags \
@@ -4117,6 +4185,7 @@
 # Additional tools to unpack and repack the apex file.
 INTERNAL_OTATOOLS_MODULES += \
   apexer \
+  apex_compression_tool \
   deapexer \
   debugfs_static \
   merge_zips \
@@ -4485,11 +4554,26 @@
 (cd $(1); find . -type d | sed 's,$$,/,'; find . \! -type d) | cut -c 3- | sort | sed 's,^,$(2),' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) -R "$(2)"
 endef
 
-# Filter out vendor from the list for AOSP targets.
-# $(1): list
 define filter-out-missing-vendor
 $(if $(INSTALLED_VENDORIMAGE_TARGET),$(1),$(filter-out vendor,$(1)))
 endef
+define filter-out-missing-vendor_dlkm
+$(if $(INSTALLED_VENDOR_DLKMIMAGE_TARGET),$(1),$(filter-out vendor_dlkm,$(1)))
+endef
+define filter-out-missing-odm
+$(if $(INSTALLED_ODMIMAGE_TARGET),$(1),$(filter-out odm,$(1)))
+endef
+define filter-out-missing-odm_dlkm
+$(if $(INSTALLED_ODM_DLKMIMAGE_TARGET),$(1),$(filter-out odm_dlkm,$(1)))
+endef
+# Filter out vendor,vendor_dlkm,odm,odm_dlkm from the list for AOSP targets.
+# $(1): list
+define filter-out-missing-partitions
+$(call filter-out-missing-vendor,\
+  $(call filter-out-missing-vendor_dlkm,\
+    $(call filter-out-missing-odm,\
+      $(call filter-out-missing-odm_dlkm,$(1)))))
+endef
 
 # Information related to dynamic partitions and virtual A/B. This information
 # is needed for building the super image (see dump-super-image-info) and
@@ -4511,13 +4595,13 @@
   $(foreach device,$(BOARD_SUPER_PARTITION_BLOCK_DEVICES), \
     echo "super_$(device)_device_size=$(BOARD_SUPER_PARTITION_$(call to-upper,$(device))_DEVICE_SIZE)" >> $(1);)
   $(if $(BOARD_SUPER_PARTITION_PARTITION_LIST), \
-    echo "dynamic_partition_list=$(call filter-out-missing-vendor,$(BOARD_SUPER_PARTITION_PARTITION_LIST))" >> $(1))
+    echo "dynamic_partition_list=$(call filter-out-missing-partitions,$(BOARD_SUPER_PARTITION_PARTITION_LIST))" >> $(1))
   $(if $(BOARD_SUPER_PARTITION_GROUPS),
     echo "super_partition_groups=$(BOARD_SUPER_PARTITION_GROUPS)" >> $(1))
   $(foreach group,$(BOARD_SUPER_PARTITION_GROUPS), \
     echo "super_$(group)_group_size=$(BOARD_$(call to-upper,$(group))_SIZE)" >> $(1); \
     $(if $(BOARD_$(call to-upper,$(group))_PARTITION_LIST), \
-      echo "super_$(group)_partition_list=$(call filter-out-missing-vendor,$(BOARD_$(call to-upper,$(group))_PARTITION_LIST))" >> $(1);))
+      echo "super_$(group)_partition_list=$(call filter-out-missing-partitions,$(BOARD_$(call to-upper,$(group))_PARTITION_LIST))" >> $(1);))
   $(if $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED)), \
     echo "build_non_sparse_super_partition=true" >> $(1))
   $(if $(filter true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED)), \
@@ -4534,6 +4618,8 @@
     echo "super_partition_error_limit=$(BOARD_SUPER_PARTITION_ERROR_LIMIT)" >> $(1))
   $(if $(filter true,$(PRODUCT_VIRTUAL_AB_OTA)), \
     echo "virtual_ab=true" >> $(1))
+  $(if $(filter true,$(PRODUCT_VIRTUAL_AB_COMPRESSION)), \
+    echo "virtual_ab_compression=true" >> $(1))
   $(if $(filter true,$(PRODUCT_VIRTUAL_AB_OTA_RETROFIT)), \
     echo "virtual_ab_retrofit=true" >> $(1))
 endef
@@ -4556,11 +4642,15 @@
 ifdef BUILDING_VENDOR_BOOT_IMAGE
   $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FILES)
   $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
+  # The vendor ramdisk may be built from the recovery ramdisk.
+  ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
+    $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
+  endif
 endif
 
 ifdef BUILDING_RECOVERY_IMAGE
   # TODO(b/30414428): Can't depend on INTERNAL_RECOVERYIMAGE_FILES alone like other
-  # BUILD_TARGET_FILES_PACKAGE dependencies because currently there're cp/rsync/rm
+  # BUILT_TARGET_FILES_PACKAGE dependencies because currently there're cp/rsync/rm
   # commands in build-recoveryimage-target, which would touch the files under
   # TARGET_RECOVERY_OUT and race with packaging target-files.zip.
   ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
@@ -4608,13 +4698,13 @@
 
 ifdef BUILDING_VENDOR_DLKM_IMAGE
   $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
-else ifdef BOARD_PREBUILT_VENDOR_DLKIMMAGE
+else ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
   $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
 endif
 
 ifdef BUILDING_ODM_DLKM_IMAGE
   $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODM_DLKMIMAGE_FILES)
-else ifdef BOARD_ODM_VENDOR_DLKIMMAGE
+else ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
   $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
 endif
 
@@ -4751,6 +4841,9 @@
 ifdef INSTALLED_DTBIMAGE_TARGET
 	cp $(INSTALLED_DTBIMAGE_TARGET) $(zip_root)/VENDOR_BOOT/dtb
 endif
+ifdef INTERNAL_VENDOR_BOOTCONFIG_TARGET
+	cp $(INTERNAL_VENDOR_BOOTCONFIG_TARGET) $(zip_root)/VENDOR_BOOT/vendor_bootconfig
+endif
 ifdef BOARD_KERNEL_BASE
 	echo "$(BOARD_KERNEL_BASE)" > $(zip_root)/VENDOR_BOOT/base
 endif
@@ -5012,7 +5105,7 @@
 	@# help early validation of the .zip file while uploading it.
 	$(hide) find $(zip_root)/META | sort >$@.list
 	$(hide) find $(zip_root) -path $(zip_root)/META -prune -o -print | sort >>$@.list
-	$(hide) $(SOONG_ZIP) -d -o $@ -C $(zip_root) -l $@.list
+	$(hide) $(SOONG_ZIP) -d -o $@ -C $(zip_root) -r $@.list
 
 .PHONY: target-files-package
 target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
@@ -5130,16 +5223,7 @@
 SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name).zip
 # For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(SYMBOLS_ZIP): $(FULL_SYSTEMIMAGE_DEPS) \
-	    $(INTERNAL_RAMDISK_FILES) \
-	    $(INTERNAL_USERDATAIMAGE_FILES) \
-	    $(INTERNAL_VENDORIMAGE_FILES) \
-	    $(INTERNAL_PRODUCTIMAGE_FILES) \
-	    $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
-	    $(INTERNAL_ODMIMAGE_FILES) \
-	    $(INTERNAL_VENDOR_DLKMIMAGE_FILES) \
-	    $(INTERNAL_ODM_DLKMIMAGE_FILES) \
-	    $(updater_dep)
+$(SYMBOLS_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
 endif
 $(SYMBOLS_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,symbols)/filelist
 $(SYMBOLS_ZIP): $(SOONG_ZIP)
@@ -5157,15 +5241,7 @@
 endif
 COVERAGE_ZIP := $(PRODUCT_OUT)/$(name).zip
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(COVERAGE_ZIP): $(FULL_SYSTEMIMAGE_DEPS) \
-	    $(INTERNAL_RAMDISK_FILES) \
-	    $(INTERNAL_USERDATAIMAGE_FILES) \
-	    $(INTERNAL_VENDORIMAGE_FILES) \
-	    $(INTERNAL_PRODUCTIMAGE_FILES) \
-	    $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
-	    $(INTERNAL_ODMIMAGE_FILES) \
-	    $(INTERNAL_VENDOR_DLKMIMAGE_FILES) \
-	    $(INTERNAL_ODM_DLKMIMAGE_FILES)
+$(COVERAGE_ZIP): $(INTERNAL_ALLIMAGES_FILES)
 endif
 $(COVERAGE_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,coverage)/filelist
 $(COVERAGE_ZIP): $(SOONG_ZIP)
@@ -5224,6 +5300,9 @@
 	find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "jacoco-report-classes.jar" -o -name "proguard_usage.zip" 2>/dev/null | sort > $@.list
 	$(SOONG_ZIP) -o $@ -L 0 -C $(OUT_DIR) -P out -l $@.list
 
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+  $(JACOCO_REPORT_CLASSES_ALL): $(INTERNAL_ALLIMAGES_FILES)
+endif
 endif # EMMA_INSTRUMENT=true
 
 
@@ -5233,17 +5312,7 @@
 PROGUARD_DICT_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-$(FILE_NAME_TAG).zip
 # For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
-$(PROGUARD_DICT_ZIP): \
-    $(FULL_SYSTEMIMAGE_DEPS) \
-    $(INTERNAL_RAMDISK_FILES) \
-    $(INTERNAL_USERDATAIMAGE_FILES) \
-    $(INTERNAL_VENDORIMAGE_FILES) \
-    $(INTERNAL_PRODUCTIMAGE_FILES) \
-    $(INTERNAL_SYSTEM_EXTIMAGE_FILES) \
-    $(INTERNAL_ODMIMAGE_FILES) \
-    $(INTERNAL_VENDOR_DLKMIMAGE_FILES) \
-    $(INTERNAL_ODM_DLKMIMAGE_FILES) \
-    $(updater_dep)
+$(PROGUARD_DICT_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
 endif
 $(PROGUARD_DICT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard)/filelist
 $(PROGUARD_DICT_ZIP): $(SOONG_ZIP)
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 883f92d..c9fcf47 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -33,4 +33,29 @@
   $(call add_soong_config_namespace,art_module)
   SOONG_CONFIG_art_module += source_build
 endif
-SOONG_CONFIG_art_module_source_build ?= true
+ifneq (,$(findstring .android.art,$(TARGET_BUILD_APPS)))
+  # Build ART modules from source if they are listed in TARGET_BUILD_APPS.
+  SOONG_CONFIG_art_module_source_build := true
+else ifneq (,$(filter true,$(NATIVE_COVERAGE) $(CLANG_COVERAGE)))
+  # Always build ART APEXes from source in coverage builds since the prebuilts
+  # aren't built with instrumentation.
+  # TODO(b/172480617): Find another solution for this.
+  SOONG_CONFIG_art_module_source_build := true
+else ifneq (,$(SANITIZE_TARGET)$(SANITIZE_HOST))
+  # Prebuilts aren't built with sanitizers either.
+  SOONG_CONFIG_art_module_source_build := true
+else ifneq (,$(PRODUCT_FUCHSIA))
+  # Fuchsia picks out ART internal packages that aren't available in the
+  # prebuilt.
+  SOONG_CONFIG_art_module_source_build := true
+else
+  # This sets the default for building ART APEXes from source rather than
+  # prebuilts (in packages/modules/ArtPrebuilt and prebuilt/module_sdk/art) in
+  # all other platform builds.
+  SOONG_CONFIG_art_module_source_build ?= true
+endif
+
+# Apex build mode variables
+ifdef APEX_BUILD_FOR_PRE_S_DEVICES
+$(call add_soong_config_var_value,ANDROID,library_linking_strategy,prefer_static)
+endif
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index 5767996..d47930c 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -102,19 +102,30 @@
   endif
 endif
 
+# Disable verify_uses_libraries check if dexpreopt is globally disabled.
+# Without dexpreopt the check is not necessary, and although it is good to have,
+# it is difficult to maintain on non-linux build platforms where dexpreopt is
+# generally disabled (the check may fail for various unrelated reasons, such
+# as a failure to get the manifest from an APK).
+ifneq ($(WITH_DEXPREOPT),true)
+  LOCAL_ENFORCE_USES_LIBRARIES :=
+endif
+
 my_enforced_uses_libraries :=
 ifdef LOCAL_ENFORCE_USES_LIBRARIES
-  my_enforced_uses_libraries := $(intermediates.COMMON)/enforce_uses_libraries.timestamp
+  my_enforced_uses_libraries := $(intermediates.COMMON)/enforce_uses_libraries.status
   $(my_enforced_uses_libraries): PRIVATE_USES_LIBRARIES := $(LOCAL_USES_LIBRARIES)
   $(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(LOCAL_OPTIONAL_USES_LIBRARIES)
+  $(my_enforced_uses_libraries): PRIVATE_RELAX_CHECK := $(RELAX_USES_LIBRARY_CHECK)
   $(my_enforced_uses_libraries): $(BUILD_SYSTEM)/verify_uses_libraries.sh $(AAPT)
   $(my_enforced_uses_libraries): $(my_prebuilt_src_file)
 	@echo Verifying uses-libraries: $<
+	rm -f $@
 	aapt_binary=$(AAPT) \
 	  uses_library_names="$(strip $(PRIVATE_USES_LIBRARIES))" \
 	  optional_uses_library_names="$(strip $(PRIVATE_OPTIONAL_USES_LIBRARIES))" \
-	  $(BUILD_SYSTEM)/verify_uses_libraries.sh $<
-	touch $@
+	  relax_check="$(strip $(PRIVATE_RELAX_CHECK))" \
+	  $(BUILD_SYSTEM)/verify_uses_libraries.sh $< $@
   $(built_module) : $(my_enforced_uses_libraries)
 endif
 
@@ -237,6 +248,7 @@
 $(built_module) : $(my_prebuilt_src_file) | $(ZIPALIGN) $(ZIP2ZIP) $(SIGNAPK_JAR)
 	$(transform-prebuilt-to-target)
 	$(uncompress-prebuilt-embedded-jni-libs)
+	$(remove-unwanted-prebuilt-embedded-jni-libs)
 ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
 	$(uncompress-dexs)
 endif  # LOCAL_UNCOMPRESS_DEX
diff --git a/core/base_rules.mk b/core/base_rules.mk
index dbd8930..68f880f 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -508,6 +508,7 @@
 ###########################################################
 
 my_init_rc_installed :=
+my_init_rc_path :=
 my_init_rc_pairs :=
 my_installed_symlinks :=
 my_default_test_module :=
@@ -534,7 +535,11 @@
 # Rule to install the module's companion init.rc.
 my_init_rc := $(LOCAL_INIT_RC_$(my_32_64_bit_suffix)) $(LOCAL_INIT_RC)
 ifneq ($(strip $(my_init_rc)),)
-my_init_rc_pairs := $(foreach rc,$(my_init_rc),$(LOCAL_PATH)/$(rc):$(TARGET_OUT$(partition_tag)_ETC)/init/$(notdir $(rc)))
+# Make doesn't support recovery as an output partition, but some Soong modules installed in recovery
+# have init.rc files that need to be installed alongside them. Manually handle the case where the
+# output file is in the recovery partition.
+my_init_rc_path := $(if $(filter $(TARGET_RECOVERY_ROOT_OUT)/%,$(my_module_path)),$(TARGET_RECOVERY_ROOT_OUT)/system/etc,$(TARGET_OUT$(partition_tag)_ETC))
+my_init_rc_pairs := $(foreach rc,$(my_init_rc),$(LOCAL_PATH)/$(rc):$(my_init_rc_path)/init/$(notdir $(rc)))
 my_init_rc_installed := $(foreach rc,$(my_init_rc_pairs),$(call word-colon,2,$(rc)))
 
 # Make sure we only set up the copy rules once, even if another arch variant
@@ -996,6 +1001,7 @@
 ifndef LOCAL_IS_HOST_MODULE
 ALL_MODULES.$(my_register_name).FILE_CONTEXTS := $(LOCAL_FILE_CONTEXTS)
 endif
+ALL_MODULES.$(my_register_name).IS_UNIT_TEST := $(LOCAL_IS_UNIT_TEST)
 test_config :=
 
 INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
diff --git a/core/binary.mk b/core/binary.mk
index be008e6..fa36d64 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -1333,11 +1333,6 @@
 
 my_c_includes += $(TOPDIR)$(LOCAL_PATH) $(intermediates) $(generated_sources_dir)
 
-# The platform JNI header is for platform modules only.
-ifeq ($(LOCAL_SDK_VERSION)$(LOCAL_USE_VNDK),)
-  my_c_includes += $(JNI_H_INCLUDE)
-endif
-
 my_c_includes := $(foreach inc,$(my_c_includes),$(call clean-path,$(inc)))
 
 my_outside_includes := $(filter-out $(OUT_DIR)/%,$(filter /%,$(my_c_includes)) $(filter ../%,$(my_c_includes)))
diff --git a/core/board_config.mk b/core/board_config.mk
index 725c0a5..245a639 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -25,6 +25,7 @@
 _board_strip_readonly_list += BOARD_HAVE_BLUETOOTH
 _board_strip_readonly_list += BOARD_INSTALLER_CMDLINE
 _board_strip_readonly_list += BOARD_KERNEL_CMDLINE
+_board_strip_readonly_list += BOARD_BOOTCONFIG
 _board_strip_readonly_list += BOARD_KERNEL_BASE
 _board_strip_readonly_list += BOARD_USES_GENERIC_AUDIO
 _board_strip_readonly_list += BOARD_USES_RECOVERY_AS_BOOT
@@ -129,6 +130,7 @@
   BUILD_BROKEN_PREBUILT_ELF_FILES \
   BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW \
   BUILD_BROKEN_USES_NETWORK \
+  BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE \
   BUILD_BROKEN_VINTF_PRODUCT_COPY_FILES \
 
 _build_broken_var_list += \
@@ -221,6 +223,7 @@
 .KATI_READONLY := $(_board_strip_readonly_list)
 
 INTERNAL_KERNEL_CMDLINE := $(BOARD_KERNEL_CMDLINE)
+INTERNAL_BOOTCONFIG := $(BOARD_BOOTCONFIG)
 
 ifneq ($(filter %64,$(TARGET_ARCH)),)
   TARGET_IS_64_BIT := true
diff --git a/core/clang/tidy.mk b/core/clang/tidy.mk
index 868f7bc..8a40878 100644
--- a/core/clang/tidy.mk
+++ b/core/clang/tidy.mk
@@ -36,7 +36,7 @@
 )
 endef
 
-# Default filter contains current directory $1 and DEFAULT_TIDY_HEADER_DIRS.
+# Default filter contains current directory $1 and optional DEFAULT_TIDY_HEADER_DIRS.
 define default_tidy_header_filter
-  -header-filter="($(subst $(space),,$1|$(DEFAULT_TIDY_HEADER_DIRS)))"
+  -header-filter=$(if $(DEFAULT_TIDY_HEADER_DIRS),"($1/|$(DEFAULT_TIDY_HEADER_DIRS))",$1/)
 endef
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 5f16363..019892e 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -243,6 +243,7 @@
 # lite(default),micro,nano,stream,full,nanopb-c,nanopb-c-enable_malloc,nanopb-c-16bit,nanopb-c-enable_malloc-16bit,nanopb-c-32bit,nanopb-c-enable_malloc-32bit
 LOCAL_PROTOC_OPTIMIZE_TYPE:=
 LOCAL_PROTO_JAVA_OUTPUT_PARAMS:=
+LOCAL_PROVIDES_USES_LIBRARY:=
 LOCAL_R8_FLAG_FILES:=
 LOCAL_RECORDED_MODULE_TYPE:=
 LOCAL_RENDERSCRIPT_CC:=
@@ -276,6 +277,7 @@
 LOCAL_SOONG_BUNDLE :=
 LOCAL_SOONG_CLASSES_JAR :=
 LOCAL_SOONG_DEX_JAR :=
+LOCAL_SOONG_DEXPREOPT_CONFIG :=
 LOCAL_SOONG_EXPORT_PROGUARD_FLAGS :=
 LOCAL_SOONG_HEADER_JAR :=
 LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR :=
diff --git a/target/board/module_arm/device.mk b/core/combo/arch/arm64/armv8-a-branchprot.mk
similarity index 70%
copy from target/board/module_arm/device.mk
copy to core/combo/arch/arm64/armv8-a-branchprot.mk
index 7cac5f8..77f3535 100644
--- a/target/board/module_arm/device.mk
+++ b/core/combo/arch/arm64/armv8-a-branchprot.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,5 +14,6 @@
 # limitations under the License.
 #
 
-$(call inherit-product, build/make/target/product/bootclasspath.mk)
-$(call inherit-product, build/make/target/product/languages_default.mk)
+# .mk file required to support the build for the new armv8-a-branchprot Arm64
+# arch variant. The file just needs to be present but is not required to
+# contain anything.
diff --git a/core/config.mk b/core/config.mk
index 9c04fe0..6a99a6c 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -149,14 +149,14 @@
   2ND_TARGET_PROJECT_SYSTEM_INCLUDES \
   ,Project include variables have been removed)
 $(KATI_obsolete_var TARGET_PREFER_32_BIT TARGET_PREFER_32_BIT_APPS TARGET_PREFER_32_BIT_EXECUTABLES)
-$(KATI_obsolete_var PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_ALLOW_LIST.)
-$(KATI_obsolete_var PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST.)
+$(KATI_obsolete_var PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_SYSTEM_CERTIFICATE_REQUIREMENT_ALLOW_LIST)
+$(KATI_obsolete_var PRODUCT_ARTIFACT_PATH_REQUIREMENT_WHITELIST,Use PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST)
 $(KATI_obsolete_var COVERAGE_PATHS,Use NATIVE_COVERAGE_PATHS instead)
 $(KATI_obsolete_var COVERAGE_EXCLUDE_PATHS,Use NATIVE_COVERAGE_EXCLUDE_PATHS instead)
-$(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported.)
-$(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead.)
-$(KATI_deprecated_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead.)
-$(KATI_deprecated_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead.)
+$(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported)
+$(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead)
+$(KATI_deprecated_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead)
+$(KATI_deprecated_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead)
 $(KATI_obsolete_var TARGET_NO_VENDOR_BOOT,Use PRODUCT_BUILD_VENDOR_BOOT_IMAGE instead)
 
 # Used to force goals to build.  Only use for conditionally defined goals.
@@ -479,6 +479,17 @@
 USE_D8 := true
 .KATI_READONLY := USE_D8
 
+# Whether to fail immediately if verify_uses_libraries check fails, or to keep
+# going and restrict dexpreopt to not compile any code for the failed module.
+#
+# The intended use case for this flag is to have a smoother migration path for
+# the Java modules that need to add <uses-library> information in their build
+# files. The flag allows build errors to be silenced quickly. This flag should be
+# used with caution and only as a temporary measure, as it masks real errors
+# and affects performance.
+RELAX_USES_LIBRARY_CHECK ?= false
+.KATI_READONLY := RELAX_USES_LIBRARY_CHECK
+
 #
 # Tools that are prebuilts for TARGET_BUILD_USE_PREBUILT_SDKS
 #
@@ -990,6 +1001,13 @@
 
 endif # PRODUCT_USE_DYNAMIC_PARTITIONS
 
+# By default, we build the hidden API csv files from source. You can use
+# prebuilt hiddenapi files by setting BOARD_PREBUILT_HIDDENAPI_DIR to the name
+# of a directory containing both prebuilt hiddenapi-flags.csv and
+# hiddenapi-index.csv.
+BOARD_PREBUILT_HIDDENAPI_DIR ?=
+.KATI_READONLY := BOARD_PREBUILT_HIDDENAPI_DIR
+
 # ###############################################################
 # Set up final options.
 # ###############################################################
@@ -1145,8 +1163,11 @@
 dont_bother_goals := out \
     product-graph dump-products
 
-ifeq ($(CALLED_FROM_SETUP),true)
+# Make ANDROID Soong config variables visible to Android.mk files, for
+# consistency with those defined in BoardConfig.mk files.
 include $(BUILD_SYSTEM)/android_soong_config_vars.mk
+
+ifeq ($(CALLED_FROM_SETUP),true)
 include $(BUILD_SYSTEM)/ninja_config.mk
 include $(BUILD_SYSTEM)/soong_config.mk
 endif
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index e466328..228bad6 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -53,6 +53,18 @@
   endif
 endif
 
+# Disable global memtag_heap in excluded paths
+ifneq ($(filter memtag_heap, $(my_global_sanitize)),)
+  combined_exclude_paths := $(MEMTAG_HEAP_EXCLUDE_PATHS) \
+                            $(PRODUCT_MEMTAG_HEAP_EXCLUDE_PATHS)
+
+  ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_exclude_paths)),\
+         $(filter $(dir)%,$(LOCAL_PATH)))),)
+    my_global_sanitize := $(filter-out memtag_heap,$(my_global_sanitize))
+    my_global_sanitize_diag := $(filter-out memtag_heap,$(my_global_sanitize_diag))
+  endif
+endif
+
 ifneq ($(my_global_sanitize),)
   my_sanitize := $(my_global_sanitize) $(my_sanitize)
 endif
@@ -116,6 +128,25 @@
   endif
 endif
 
+# Enable memtag_heap in included paths (for Arm64 only).
+ifeq ($(filter memtag_heap, $(my_sanitize)),)
+  ifneq ($(filter arm64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
+    combined_sync_include_paths := $(MEMTAG_HEAP_SYNC_INCLUDE_PATHS) \
+                                   $(PRODUCT_MEMTAG_HEAP_SYNC_INCLUDE_PATHS)
+    combined_async_include_paths := $(MEMTAG_HEAP_ASYNC_INCLUDE_PATHS) \
+                                    $(PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS)
+
+    ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_sync_include_paths)),\
+           $(filter $(dir)%,$(LOCAL_PATH)))),)
+      my_sanitize := memtag_heap $(my_sanitize)
+      my_sanitize_diag := memtag_heap $(my_sanitize_diag)
+    else ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_async_include_paths)),\
+           $(filter $(dir)%,$(LOCAL_PATH)))),)
+      my_sanitize := memtag_heap $(my_sanitize)
+    endif
+  endif
+endif
+
 # If CFI is disabled globally, remove it from my_sanitize.
 ifeq ($(strip $(ENABLE_CFI)),false)
   my_sanitize := $(filter-out cfi,$(my_sanitize))
@@ -158,6 +189,7 @@
 
 ifneq ($(filter arm x86 x86_64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
   my_sanitize := $(filter-out hwaddress,$(my_sanitize))
+  my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
 endif
 
 ifneq ($(filter hwaddress,$(my_sanitize)),)
@@ -177,6 +209,20 @@
   endif
 endif
 
+ifneq ($(filter memtag_heap,$(my_sanitize)),)
+  # Add memtag ELF note.
+  ifneq ($(filter memtag_heap,$(my_sanitize_diag)),)
+    my_whole_static_libraries += note_memtag_heap_sync
+  else
+    my_whole_static_libraries += note_memtag_heap_async
+  endif
+  # This is all that memtag_heap does - it is not an actual -fsanitize argument.
+  # Remove it from the list.
+  my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
+endif
+
+my_sanitize_diag := $(filter-out memtag_heap,$(my_sanitize_diag))
+
 # TSAN is not supported on 32-bit architectures. For non-multilib cases, make
 # its use an error. For multilib cases, don't use it for the 32-bit case.
 ifneq ($(filter thread,$(my_sanitize)),)
@@ -435,3 +481,13 @@
     endif
   endif
 endif
+
+# http://b/177566116, libc++ may crash with this sanitizer.
+# Disable this check unless it has been explicitly specified.
+ifneq ($(findstring fsanitize,$(my_cflags)),)
+  ifneq ($(findstring integer,$(my_cflags)),)
+    ifeq ($(findstring sanitize=unsigned-shift-base,$(my_cflags)),)
+      my_cflags += -fno-sanitize=unsigned-shift-base
+    endif
+  endif
+endif
diff --git a/core/cxx_stl_setup.mk b/core/cxx_stl_setup.mk
index a2abb1a..f71ef72 100644
--- a/core/cxx_stl_setup.mk
+++ b/core/cxx_stl_setup.mk
@@ -78,18 +78,12 @@
         my_static_libraries += libc++demangle
 
         ifeq ($(my_link_type),static)
-            my_static_libraries += libm libc
-            ifeq (arm,$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
-                my_static_libraries += libunwind_llvm
-                my_ldflags += -Wl,--exclude-libs,libunwind_llvm.a
-            else
-                my_static_libraries += libgcc_stripped
-                my_ldflags += -Wl,--exclude-libs,libgcc_stripped.a
-            endif
+            my_static_libraries += libm libc libunwind
         endif
     endif
 else ifeq ($(my_cxx_stl),ndk)
     # Using an NDK STL. Handled in binary.mk, except for the unwinder.
+    # TODO: Switch the NDK over to the LLVM unwinder for non-arm32 architectures.
     ifeq (arm,$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
         my_static_libraries += libunwind_llvm
         my_ldflags += -Wl,--exclude-libs,libunwind_llvm.a
diff --git a/core/definitions.mk b/core/definitions.mk
index 4300efe..2883f0d 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -556,7 +556,7 @@
   $(foreach m, $(ALL_MODULES), \
     $(eval ALL_MODULES.$(m).NOTICE_DEPS := \
       $(sort \
-         $(foreach d,$(ALL_MODULES.$(m).NOTICE_DEPS), \
+         $(foreach d,$(sort $(ALL_MODULES.$(m).NOTICE_DEPS)), \
            $(_lookup.$(d)) \
         ) \
       ) \
@@ -578,7 +578,9 @@
 define license-metadata-rule
 $(strip $(eval _dir := $(call license-metadata-dir)))
 $(strip $(eval _deps := $(sort $(filter-out $(_dir)/$(1).meta_lic,$(foreach d,$(ALL_MODULES.$(1).NOTICE_DEPS), $(_dir)/$(d).meta_lic)))))
-$(foreach b,$(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED)),
+$(strip $(eval _notices := $(sort $(ALL_MODULES.$(1).NOTICES))))
+$(strip $(eval _tgts := $(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED))))
+$(foreach b,$(_tgts),
 $(_dir)/$(b).meta_module ::
 	mkdir -p $$(dir $$@)
 	echo $(_dir)/$(1).meta_lic >> $$@
@@ -587,31 +589,46 @@
 )
 $(_dir)/$(1).meta_lic: PRIVATE_KINDS := $(sort $(ALL_MODULES.$(1).LICENSE_KINDS))
 $(_dir)/$(1).meta_lic: PRIVATE_CONDITIONS := $(sort $(ALL_MODULES.$(1).LICENSE_CONDITIONS))
-$(_dir)/$(1).meta_lic: PRIVATE_NOTICES := $(sort $(ALL_MODULES.$(1).NOTICES))
+$(_dir)/$(1).meta_lic: PRIVATE_NOTICES := $(_notices)
 $(_dir)/$(1).meta_lic: PRIVATE_NOTICE_DEPS := $(_deps)
-$(_dir)/$(1).meta_lic: PRIVATE_TARGETS := $(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED))
-$(_dir)/$(1).meta_lic: PRIVATE_IS_CONTAINER := $(sort $(ALL_MODULES.$(1).IS_CONTAINER))
-$(_dir)/$(1).meta_lic: PRIVATE_PACKAGE_NAME := $(ALL_MODULES.$(1).LICENSE_PACKAGE_NAME)
+$(_dir)/$(1).meta_lic: PRIVATE_TARGETS := $(_tgts)
+$(_dir)/$(1).meta_lic: PRIVATE_IS_CONTAINER := $(ALL_MODULES.$(1).IS_CONTAINER)
+$(_dir)/$(1).meta_lic: PRIVATE_PACKAGE_NAME := $(strip $(ALL_MODULES.$(1).LICENSE_PACKAGE_NAME))
 $(_dir)/$(1).meta_lic: PRIVATE_INSTALL_MAP := $(sort $(ALL_MODULES.$(1).LICENSE_INSTALL_MAP))
-$(_dir)/$(1).meta_lic : $(_deps) $(ALL_MODULES.$(1).NOTICES) $(foreach b,$(sort $(ALL_MODULES.$(1).BUILT) $(ALL_MODULES.$(1).INSTALLED)), $(_dir)/$(b).meta_module) build/make/tools/build-license-metadata.sh
+$(_dir)/$(1).meta_lic : $(_deps) $(_notices) $(foreach b,$(_tgts), $(_dir)/$(b).meta_module) build/make/tools/build-license-metadata.sh
 	rm -f $$@
 	mkdir -p $$(dir $$@)
-	build/make/tools/build-license-metadata.sh -k $$(PRIVATE_KINDS) -c $$(PRIVATE_CONDITIONS) -n $$(PRIVATE_NOTICES) -d $$(PRIVATE_NOTICE_DEPS) -m $$(PRIVATE_INSTALL_MAP) -t $$(PRIVATE_TARGETS) $$(if $$(filter-out false,$$(PRIVATE_IS_CONTAINER)),-is_container) -p $$(PRIVATE_PACKAGE_NAME) -o $$@
-
-$(1) : $(_dir)/$(1).meta_lic
-
-$(if $(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE),$(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE) : $(_dir)/$(1).meta_lic)
+	build/make/tools/build-license-metadata.sh -k $$(PRIVATE_KINDS) -c $$(PRIVATE_CONDITIONS) -n $$(PRIVATE_NOTICES) -d $$(PRIVATE_NOTICE_DEPS) -m $$(PRIVATE_INSTALL_MAP) -t $$(PRIVATE_TARGETS) $$(if $$(PRIVATE_IS_CONTAINER),-is_container) -p $$(PRIVATE_PACKAGE_NAME) -o $$@
 
 .PHONY: $(1).meta_lic
 $(1).meta_lic : $(_dir)/$(1).meta_lic
 
+$(strip $(eval _mifs := $(sort $(ALL_MODULES.$(1).MODULE_INSTALLED_FILENAMES))))
+$(strip $(eval _infs := $(sort $(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE))))
+
+# Emit each installed notice file rule if it references the current module
+$(if $(_infs),$(foreach inf,$(_infs),
+$(if $(strip $(filter $(1),$(INSTALLED_NOTICE_FILES.$(inf).MODULE))),
+$(strip $(eval _mif := $(firstword $(foreach m,$(_mifs),$(if $(filter %/src/$(m).txt,$(inf)),$(m))))))
+
+$(inf) : $(_dir)/$(1).meta_lic
+$(inf): PRIVATE_INSTALLED_MODULE := $(_mif)
+$(inf) : PRIVATE_NOTICES := $(_notices)
+
+$(inf): $(_notices)
+	@echo Notice file: $$< -- $$@
+	mkdir -p $$(dir $$@)
+	awk 'FNR==1 && NR > 1 {print "\n"} {print}' $$(PRIVATE_NOTICES) > $$@
+
+)))
+
 endef
 
 ###########################################################
 ## Declares a license metadata build rule for ALL_MODULES
 ###########################################################
 define build-license-metadata
-$(foreach m,$(ALL_MODULES),$(eval $(call license-metadata-rule,$(m))))
+$(foreach m,$(sort $(ALL_MODULES)),$(eval $(call license-metadata-rule,$(m))))
 endef
 
 ###########################################################
@@ -632,14 +649,6 @@
 endef
 
 ###########################################################
-## Convert install path to on-device path.
-###########################################################
-# $(1): install path
-define install-path-to-on-device-path
-$(patsubst $(PRODUCT_OUT)%,%,$(1))
-endef
-
-###########################################################
 ## The intermediates directory.  Where object files go for
 ## a given target.  We could technically get away without
 ## the "_intermediates" suffix on the directory, but it's
@@ -2146,6 +2155,17 @@
 $(hide) $(call commit-change-for-toc,$@)
 endef
 
+# Runs jarjar on an input file.  When there is a syntax error in a rules file,
+# jarjar doesn't exit with a nonzero return code and doesn't write the output
+# file, so remove the output file before running jarjar and check that it
+# exists afterwards.
+define transform-jarjar
+echo $($(PRIVATE_PREFIX)DISPLAY) JarJar: $@
+rm -f $@
+$(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+[ -e $@ ] || (echo "Missing output file"; exit 1)
+endef
+
 # Moves $1.tmp to $1 if necessary. This is designed to be used with
 # .KATI_RESTAT. For kati, this function doesn't update the timestamp
 # of $1 when $1.tmp is identical to $1 so that ninja won't rebuild
@@ -2393,14 +2413,19 @@
 #
 define uncompress-prebuilt-embedded-jni-libs
   if (zipinfo $@ 'lib/*.so' 2>/dev/null | grep -v ' stor ' >/dev/null) ; then \
-    $(ZIP2ZIP) -i $@ -o $@.tmp -0 'lib/**/*.so' \
-      $(if $(PRIVATE_EMBEDDED_JNI_LIBS), \
-        -x 'lib/**/*.so' \
-        $(addprefix -X ,$(PRIVATE_EMBEDDED_JNI_LIBS))) && \
-    mv -f $@.tmp $@ ; \
+    $(ZIP2ZIP) -i $@ -o $@.tmp -0 'lib/**/*.so' && mv -f $@.tmp $@ ; \
   fi
 endef
 
+# Remove unwanted shared JNI libraries embedded in an apk.
+#
+define remove-unwanted-prebuilt-embedded-jni-libs
+  $(if $(PRIVATE_EMBEDDED_JNI_LIBS), \
+    $(ZIP2ZIP) -i $@ -o $@.tmp \
+      -x 'lib/**/*.so' $(addprefix -X ,$(PRIVATE_EMBEDDED_JNI_LIBS)) && \
+    mv -f $@.tmp $@)
+endef
+
 # TODO(joeo): If we can ever upgrade to post 3.81 make and get the
 # new prebuilt rules to work, we should change this to copy the
 # resources to the out directory and then copy the resources.
@@ -2762,7 +2787,8 @@
     $(R8_DEBUG_MODE) \
     $(PRIVATE_PROGUARD_FLAGS) \
     $(addprefix -injars , $(PRIVATE_EXTRA_INPUT_JAR)) \
-    $(PRIVATE_DX_FLAGS)
+    $(PRIVATE_DX_FLAGS) \
+    -ignorewarnings
 $(hide) touch $(PRIVATE_PROGUARD_DICTIONARY)
 endef
 
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index f72752c..dda7de0 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -1,16 +1,24 @@
 DEX_PREOPT_CONFIG := $(SOONG_OUT_DIR)/dexpreopt.config
 
 ENABLE_PREOPT := true
+ENABLE_PREOPT_BOOT_IMAGES := true
 ifneq (true,$(filter true,$(WITH_DEXPREOPT)))
+  # Disable dexpreopt for libraries/apps and for boot images.
   ENABLE_PREOPT :=
+  ENABLE_PREOPT_BOOT_IMAGES :=
 else ifneq (true,$(filter true,$(PRODUCT_USES_DEFAULT_ART_CONFIG)))
+  # Disable dexpreopt for libraries/apps and for boot images: not having default
+  # ART config means that some important system properties are not set, which
+  # would result in passing bad arguments to dex2oat and failing the build.
   ENABLE_PREOPT :=
+  ENABLE_PREOPT_BOOT_IMAGES :=
 else ifeq (true,$(DISABLE_PREOPT))
+  # Disable dexpreopt for libraries/apps, but do compile boot images.
   ENABLE_PREOPT :=
 endif
 
 # The default value for LOCAL_DEX_PREOPT
-DEX_PREOPT_DEFAULT ?= true
+DEX_PREOPT_DEFAULT ?= $(ENABLE_PREOPT)
 
 # The default filter for which files go into the system_other image (if it is
 # being used). Note that each pattern p here matches both '/<p>' and /system/<p>'.
@@ -46,14 +54,6 @@
   endif
 endif
 
-# Use the first preloaded-classes file in PRODUCT_COPY_FILES.
-PRELOADED_CLASSES := $(call word-colon,1,$(firstword \
-    $(filter %system/etc/preloaded-classes,$(PRODUCT_COPY_FILES))))
-
-# Use the first dirty-image-objects file in PRODUCT_COPY_FILES.
-DIRTY_IMAGE_OBJECTS := $(call word-colon,1,$(firstword \
-    $(filter %system/etc/dirty-image-objects,$(PRODUCT_COPY_FILES))))
-
 # Get value of a property. It is first searched from PRODUCT_VENDOR_PROPERTIES
 # and then falls back to PRODUCT_SYSTEM_PROPERTIES
 # $1: name of the property
@@ -73,6 +73,7 @@
   $(call json_start)
 
   $(call add_json_bool, DisablePreopt,                           $(call invert_bool,$(ENABLE_PREOPT)))
+  $(call add_json_bool, DisablePreoptBootImages,                 $(call invert_bool,$(ENABLE_PREOPT_BOOT_IMAGES)))
   $(call add_json_list, DisablePreoptModules,                    $(DEXPREOPT_DISABLED_MODULES))
   $(call add_json_bool, OnlyPreoptBootImageAndSystemServer,      $(filter true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY)))
   $(call add_json_bool, UseArtImage,                             $(filter true,$(DEXPREOPT_USE_ART_IMAGE)))
@@ -104,6 +105,7 @@
   $(call add_json_bool, IsEng,                                   $(filter eng,$(TARGET_BUILD_VARIANT)))
   $(call add_json_bool, SanitizeLite,                            $(SANITIZE_LITE))
   $(call add_json_bool, DefaultAppImages,                        $(WITH_DEX_PREOPT_APP_IMAGE))
+  $(call add_json_bool, RelaxUsesLibraryCheck,                   $(filter true,$(RELAX_USES_LIBRARY_CHECK)))
   $(call add_json_str,  Dex2oatXmx,                              $(DEX2OAT_XMX))
   $(call add_json_str,  Dex2oatXms,                              $(DEX2OAT_XMS))
   $(call add_json_str,  EmptyDirectory,                          $(OUT_DIR)/empty)
@@ -124,7 +126,6 @@
   $(call end_json_map)
 endif
 
-  $(call add_json_str,  DirtyImageObjects,                  $(DIRTY_IMAGE_OBJECTS))
   $(call add_json_list, BootImageProfiles,                  $(PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION))
   $(call add_json_str,  BootFlags,                          $(PRODUCT_DEX_PREOPT_BOOT_FLAGS))
   $(call add_json_str,  Dex2oatImageXmx,                    $(DEX2OAT_IMAGE_XMX))
diff --git a/core/dex_preopt_config_merger.py b/core/dex_preopt_config_merger.py
new file mode 100755
index 0000000..4efcc17
--- /dev/null
+++ b/core/dex_preopt_config_merger.py
@@ -0,0 +1,99 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+"""
+A tool for merging dexpreopt.config files for <uses-library> dependencies into
+the dexpreopt.config file of the library/app that uses them. This is needed to
+generate class loader context (CLC) for dexpreopt.
+
+In Make there is no topological order when processing different modules, so a
+<uses-library> dependency module may not have been processed yet by the time the
+dependent module is processed. Therefore makefiles communicate the information
+from dependencies via dexpreopt.config files and add file-level dependencies
+from a module dexpreopt.config to its dependency configs. The actual patching
+of configs is done by this script, which is called from the makefiles.
+"""
+
+from __future__ import print_function
+
+import json
+from collections import OrderedDict
+import sys
+
+
+def main():
+  """Program entry point."""
+  if len(sys.argv) < 2:
+    raise SystemExit('usage: %s <main-config> [dep-config ...]' % sys.argv[0])
+
+  # Read all JSON configs.
+  cfgs = []
+  for arg in sys.argv[1:]:
+    with open(arg, 'r') as f:
+      cfgs.append(json.load(f, object_pairs_hook=OrderedDict))
+
+  # The first config is the dexpreopted library/app, the rest are its
+  # <uses-library> dependencies.
+  cfg0 = cfgs[0]
+
+  # Put dependency configs in a map keyed on module name (for easier lookup).
+  uses_libs = {}
+  for cfg in cfgs[1:]:
+    uses_libs[cfg['Name']] = cfg
+
+  # Load the original CLC map.
+  clc_map = cfg0['ClassLoaderContexts']
+
+  # Create a new CLC map that will be a copy of the original one with patched
+  # fields from dependency dexpreopt.config files.
+  clc_map2 = OrderedDict()
+
+  # Patch CLC for each SDK version. This should not be necessary for
+  # compatibility libraries (so-called "conditional CLC"), because they all have
+  # known names, known paths in system/framework, and no subcontext, but keep
+  # the loop in case this changes in the future.
+  for sdk_ver in clc_map:
+    clcs = clc_map[sdk_ver]
+    clcs2 = []
+    for clc in clcs:
+      lib = clc['Name']
+      if lib in uses_libs:
+        ulib = uses_libs[lib]
+        # The real <uses-library> name (may be different from the module name).
+        clc['Name'] = ulib['ProvidesUsesLibrary']
+        # On-device (install) path to the dependency DEX jar file.
+        clc['Device'] = ulib['DexLocation']
+        # CLC of the dependency becomes a subcontext. We only need sub-CLC for
+        # 'any' version because all other versions are for compatibility
+        # libraries, which exist only for apps and not for libraries.
+        clc['Subcontexts'] = ulib['ClassLoaderContexts'].get('any')
+      else:
+        # dexpreopt.config for this <uses-library> is not among the script
+        # arguments, which may be the case with compatibility libraries that
+        # don't need patching anyway. Just use the original CLC.
+        pass
+      clcs2.append(clc)
+    clc_map2[sdk_ver] = clcs2
+
+  # Overwrite the original class loader context with the patched one.
+  cfg0['ClassLoaderContexts'] = clc_map2
+
+  # Update dexpreopt.config file.
+  with open(sys.argv[1], 'w') as f:
+    f.write(json.dumps(cfgs[0], indent=4, separators=(',', ': ')))
+
+if __name__ == '__main__':
+  main()
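
A minimal sketch of the patching performed by the loop above, assuming a hypothetical app config with one <uses-library> dependency (all names, paths and values below are invented; only the field names mirror the script):

# Hypothetical CLC entry of the app before patching: the module name is used as
# a placeholder and the on-device path defaults to /system/framework/<lib>.jar.
app_clc = {'any': [{
    'Name': 'example-lib',
    'Host': 'out/soong/.intermediates/example-lib/javalib.jar',
    'Device': '/system/framework/example-lib.jar',
    'Subcontexts': None,
}]}

# Hypothetical dexpreopt.config of the <uses-library> dependency.
dep = {
    'Name': 'example-lib',
    'ProvidesUsesLibrary': 'com.example.lib',
    'DexLocation': '/system_ext/framework/com.example.lib.jar',
    'ClassLoaderContexts': {'any': []},
}

# The same three patching steps as the loop above, for a single library.
for clc in app_clc['any']:
    if clc['Name'] == dep['Name']:
        clc['Name'] = dep['ProvidesUsesLibrary']
        clc['Device'] = dep['DexLocation']
        clc['Subcontexts'] = dep['ClassLoaderContexts'].get('any')

print(app_clc['any'][0]['Name'], app_clc['any'][0]['Device'])
# -> com.example.lib /system_ext/framework/com.example.lib.jar
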
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index c31d4e8..cbd3069 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -189,41 +189,68 @@
   my_filtered_optional_uses_libraries := $(filter-out $(INTERNAL_PLATFORM_MISSING_USES_LIBRARIES), \
     $(LOCAL_OPTIONAL_USES_LIBRARIES))
 
-  # compatibility libraries are added to class loader context of an app only if
-  # targetSdkVersion in the app's manifest is lower than the given SDK version
+  # TODO(b/132357300): This may filter out too much, as PRODUCT_PACKAGES doesn't
+  # include all packages (the full list is unknown until reading all Android.mk
+  # makefiles). As a consequence, a library may be present but not included in
+  # dexpreopt, which will result in class loader context mismatch and a failure
+  # to load dexpreopt code on device. We should fix this, either by deferring
+  # dependency computation until the full list of product packages is known, or
+  # by adding product-specific lists of missing libraries.
+  my_filtered_optional_uses_libraries := $(filter $(PRODUCT_PACKAGES), \
+    $(my_filtered_optional_uses_libraries))
 
-  my_dexpreopt_libs_compat_28 := \
-    org.apache.http.legacy
+  ifeq ($(LOCAL_MODULE_CLASS),APPS)
+    # compatibility libraries are added to class loader context of an app only if
+    # targetSdkVersion in the app's manifest is lower than the given SDK version
 
-  my_dexpreopt_libs_compat_29 := \
-    android.hidl.base-V1.0-java \
-    android.hidl.manager-V1.0-java
+    my_dexpreopt_libs_compat_28 := \
+      org.apache.http.legacy
 
-  my_dexpreopt_libs_compat_30 := \
-    android.test.base \
-    android.test.mock
+    my_dexpreopt_libs_compat_29 := \
+      android.hidl.base-V1.0-java \
+      android.hidl.manager-V1.0-java
 
-  my_dexpreopt_libs_compat := \
-    $(my_dexpreopt_libs_compat_28) \
-    $(my_dexpreopt_libs_compat_29) \
-    $(my_dexpreopt_libs_compat_30)
+    my_dexpreopt_libs_compat_30 := \
+      android.test.base \
+      android.test.mock
 
-  my_dexpreopt_libs := $(sort \
+    my_dexpreopt_libs_compat := \
+      $(my_dexpreopt_libs_compat_28) \
+      $(my_dexpreopt_libs_compat_29) \
+      $(my_dexpreopt_libs_compat_30)
+  else
+    my_dexpreopt_libs_compat :=
+  endif
+
+  my_dexpreopt_libs := \
     $(LOCAL_USES_LIBRARIES) \
-    $(my_filtered_optional_uses_libraries) \
-  )
+    $(my_filtered_optional_uses_libraries)
 
   # 1: SDK version
   # 2: list of libraries
+  #
+  # Make does not process modules in topological order wrt. <uses-library>
+  # dependencies, therefore we cannot rely on variables to get the information
+  # about dependencies (in particular, their on-device path and class loader
+  # context). This information is communicated via dexpreopt.config files: each
+  # config depends on configs for <uses-library> dependencies of this module,
+  # and the dex_preopt_config_merger.py script reads all configs and inserts the
+  # missing bits from dependency configs into the module config.
+  #
+  # By default on-device path is /system/framework/*.jar, and class loader
+  # subcontext is empty. These values are correct for compatibility libraries,
+  # which are special and not handled by dex_preopt_config_merger.py.
+  #
   add_json_class_loader_context = \
-    $(call add_json_map, $(1)) \
+    $(call add_json_array, $(1)) \
     $(foreach lib, $(2),\
-      $(call add_json_map, $(lib)) \
-      $(eval file := $(filter %/$(lib).jar, $(call module-installed-files,$(lib)))) \
-      $(call add_json_str, Host,       $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar) \
-      $(call add_json_str, Device,     $(call install-path-to-on-device-path,$(file))) \
+      $(call add_json_map_anon) \
+      $(call add_json_str, Name, $(lib)) \
+      $(call add_json_str, Host, $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar) \
+      $(call add_json_str, Device, /system/framework/$(lib).jar) \
+      $(call add_json_val, Subcontexts, null) \
       $(call end_json_map)) \
-    $(call end_json_map)
+    $(call end_json_array)
 
   # Record dex-preopt config.
   DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT := $(LOCAL_DEX_PREOPT)
@@ -251,7 +278,9 @@
   $(call add_json_list, PreoptFlags,                    $(LOCAL_DEX_PREOPT_FLAGS))
   $(call add_json_str,  ProfileClassListing,            $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE)))
   $(call add_json_bool, ProfileIsTextListing,           $(my_profile_is_text_listing))
+  $(call add_json_str,  EnforceUsesLibrariesStatusFile, $(intermediates.COMMON)/enforce_uses_libraries.status)
   $(call add_json_bool, EnforceUsesLibraries,           $(LOCAL_ENFORCE_USES_LIBRARIES))
+  $(call add_json_str,  ProvidesUsesLibrary,            $(firstword $(LOCAL_PROVIDES_USES_LIBRARY) $(LOCAL_MODULE)))
   $(call add_json_map,  ClassLoaderContexts)
   $(call add_json_class_loader_context, any, $(my_dexpreopt_libs))
   $(call add_json_class_loader_context,  28, $(my_dexpreopt_libs_compat_28))
@@ -273,12 +302,27 @@
   my_dexpreopt_config := $(intermediates)/dexpreopt.config
   my_dexpreopt_script := $(intermediates)/dexpreopt.sh
   my_dexpreopt_zip := $(intermediates)/dexpreopt.zip
+  my_dexpreopt_config_merger := $(BUILD_SYSTEM)/dex_preopt_config_merger.py
 
+  # The module dexpreopt.config depends on the dexpreopt.config files of each
+  # <uses-library> dependency, because Make may process these libraries after
+  # the current module (there is no topological order) and the dependency
+  # information (paths, class loader context) may not be ready yet by the time
+  # this dexpreopt.config is generated. It is therefore necessary to add
+  # file-level dependencies between dexpreopt.config files.
+  my_dexpreopt_dep_configs := $(foreach lib, \
+    $(filter-out $(my_dexpreopt_libs_compat),$(LOCAL_USES_LIBRARIES) $(my_filtered_optional_uses_libraries)), \
+    $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,)/dexpreopt.config)
+
+  $(my_dexpreopt_config): $(my_dexpreopt_dep_configs) $(my_dexpreopt_config_merger)
   $(my_dexpreopt_config): PRIVATE_MODULE := $(LOCAL_MODULE)
   $(my_dexpreopt_config): PRIVATE_CONTENTS := $(json_contents)
+  $(my_dexpreopt_config): PRIVATE_DEP_CONFIGS := $(my_dexpreopt_dep_configs)
+  $(my_dexpreopt_config): PRIVATE_CONFIG_MERGER := $(my_dexpreopt_config_merger)
   $(my_dexpreopt_config):
 	@echo "$(PRIVATE_MODULE) dexpreopt.config"
 	echo -e -n '$(subst $(newline),\n,$(subst ','\'',$(subst \,\\,$(PRIVATE_CONTENTS))))' > $@
+	$(PRIVATE_CONFIG_MERGER) $@ $(PRIVATE_DEP_CONFIGS)
 
   .KATI_RESTAT: $(my_dexpreopt_script)
   $(my_dexpreopt_script): PRIVATE_MODULE := $(LOCAL_MODULE)
@@ -302,6 +346,9 @@
       $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/javalib.jar)
   my_dexpreopt_deps += $(my_dexpreopt_images_deps)
   my_dexpreopt_deps += $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
+  ifeq ($(LOCAL_ENFORCE_USES_LIBRARIES),true)
+    my_dexpreopt_deps += $(intermediates.COMMON)/enforce_uses_libraries.status
+  endif
 
   $(my_dexpreopt_zip): PRIVATE_MODULE := $(LOCAL_MODULE)
   $(my_dexpreopt_zip): $(my_dexpreopt_deps)
diff --git a/core/dumpconfig.mk b/core/dumpconfig.mk
new file mode 100644
index 0000000..9b1f2c2
--- /dev/null
+++ b/core/dumpconfig.mk
@@ -0,0 +1,144 @@
+# Read and dump the product configuration.
+
+# Called from the product-config tool, not from the main build system.
+
+#
+# Ensure we are being called correctly
+#
+ifndef KATI
+    $(warning Kati must be used to call dumpconfig.mk, not make.)
+    $(error stopping)
+endif
+
+ifdef DEFAULT_GOAL
+    $(warning Calling dumpconfig.mk from inside the make build system is not)
+    $(warning supported. It is only meant to be called via kati by product-config.)
+    $(error stopping)
+endif
+
+ifndef TARGET_PRODUCT
+    $(warning dumpconfig.mk requires TARGET_PRODUCT to be set)
+    $(error stopping)
+endif
+
+ifndef TARGET_BUILD_VARIANT
+    $(warning dumpconfig.mk requires TARGET_BUILD_VARIANT to be set)
+    $(error stopping)
+endif
+
+ifneq (build/make/core/config.mk,$(wildcard build/make/core/config.mk))
+    $(warning dumpconfig must be called from the root of the source tree)
+    $(error stopping)
+endif
+
+ifeq (,$(DUMPCONFIG_FILE))
+    $(warning dumpconfig requires DUMPCONFIG_FILE to be set)
+    $(error stopping)
+endif
+
+# Skip the second inclusion of all of the product config files, because
+# we will do these checks in the product_config tool.
+SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK := true
+
+# Before we do anything else output the format version.
+$(file > $(DUMPCONFIG_FILE),dumpconfig_version,1)
+$(file >> $(DUMPCONFIG_FILE),dumpconfig_file,$(DUMPCONFIG_FILE))
+
+# Default goal for dumpconfig
+dumpconfig:
+	$(file >> $(DUMPCONFIG_FILE),***DONE***)
+	@echo ***DONE***
+
+# TODO(Remove): These need to be set externally
+OUT_DIR := out
+TMPDIR = /tmp/build-temp
+BUILD_DATETIME_FILE := $(OUT_DIR)/build_date.txt
+
+# Doubles embedded quotation marks (CSV escaping) and wraps the value in quotation marks.
+define escape-for-csv
+"$(subst ","",$1)"
+endef
+
+# Args:
+#   $(1): include stack
+define dump-import-start
+$(eval $(file >> $(DUMPCONFIG_FILE),import,$(strip $(1))))
+endef
+
+# Args:
+#   $(1): include stack
+define dump-import-done
+$(eval $(file >> $(DUMPCONFIG_FILE),imported,$(strip $(1))))
+endef
+
+# Args:
+#   $(1): Current file
+#   $(2): Inherited file
+define dump-inherit
+$(eval $(file >> $(DUMPCONFIG_FILE),inherit,$(strip $(1)),$(strip $(2))))
+endef
+
+# Args:
+#   $(1): Config phase (PRODUCT, EXPAND, or DEVICE)
+#   $(2): Root nodes to import
+#   $(3): All variable names
+#   $(4): Single-value variables
+#   $(5): Makefile being processed
+define dump-phase-start
+$(eval $(file >> $(DUMPCONFIG_FILE),phase,$(strip $(1)),$(strip $(2)))) \
+$(foreach var,$(3), \
+    $(eval $(file >> $(DUMPCONFIG_FILE),var,$(if $(filter $(4),$(var)),single,list),$(var))) \
+) \
+$(call dump-config-vals,$(strip $(5)),initial)
+endef
+
+# Args:
+#   $(1): Makefile being processed
+define dump-phase-end
+$(call dump-config-vals,$(strip $(1)),final)
+endef
+
+define dump-debug
+$(eval $(file >> $(DUMPCONFIG_FILE),debug,$(1)))
+endef
+
+# Skip these when dumping. They're not used and they cause a lot of noise in the dump.
+DUMPCONFIG_SKIP_VARS := \
+	.VARIABLES \
+	.KATI_SYMBOLS \
+	1 \
+	2 \
+	3 \
+	4 \
+	5 \
+	6 \
+	7 \
+	8 \
+	9 \
+	LOCAL_PATH \
+	MAKEFILE_LIST \
+	PARENT_PRODUCT_FILES \
+	current_mk \
+	_eiv_ev \
+	_eiv_i \
+	_eiv_sv \
+	_eiv_tv \
+	inherit_var \
+	np \
+	_node_import_context \
+	_included \
+	_include_stack \
+	_in \
+	_nic.%
+
+# Args:
+#   $(1): Makefile that was included
+#   $(2): block (before,import,after,initial,final)
+define dump-config-vals
+$(foreach var,$(filter-out $(DUMPCONFIG_SKIP_VARS),$(.KATI_SYMBOLS)),\
+    $(eval $(file >> $(DUMPCONFIG_FILE),val,$(call escape-for-csv,$(1)),$(2),$(call escape-for-csv,$(var)),$(call escape-for-csv,$($(var))),$(call escape-for-csv,$(KATI_variable_location $(var))))) \
+)
+endef
+
+include build/make/core/config.mk
+
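As a small aside on the dump format, the escape-for-csv define above applies the standard CSV quoting rule; a minimal sketch of the same rule (the sample value is made up):

# Double any embedded quotation marks, then wrap the whole value in quotation
# marks, exactly like the $(subst ","",$1) call in escape-for-csv.
def escape_for_csv(value: str) -> str:
    return '"%s"' % value.replace('"', '""')

print(escape_for_csv('flags: -DNAME="value"'))
# -> "flags: -DNAME=""value"""
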
diff --git a/core/envsetup.mk b/core/envsetup.mk
index a5571ae..8c25086 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -275,7 +275,7 @@
 _vendor_dlkm_path_placeholder := ||VENDOR_DLKM-PATH-PH||
 _odm_dlkm_path_placeholder := ||ODM_DLKM-PATH-PH||
 TARGET_COPY_OUT_VENDOR := $(_vendor_path_placeholder)
-TARGET_COPY_OUT_VENDOR_RAMDISK := vendor-ramdisk
+TARGET_COPY_OUT_VENDOR_RAMDISK := vendor_ramdisk
 TARGET_COPY_OUT_PRODUCT := $(_product_path_placeholder)
 # TODO(b/135957588) TARGET_COPY_OUT_PRODUCT_SERVICES will copy the target to
 # product
@@ -301,6 +301,10 @@
     com.android.art:okhttp \
     com.android.art:bouncycastle \
     com.android.art:apache-xml
+# With EMMA_INSTRUMENT_FRAMEWORK=true the Core libraries depend on jacoco.
+ifeq (true,$(EMMA_INSTRUMENT_FRAMEWORK))
+  ART_APEX_JARS += com.android.art:jacocoagent
+endif
 #################################################################
 
 # Read the product specs so we can get TARGET_DEVICE and other
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index da32978..5eeb8ac 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -125,8 +125,7 @@
 ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
 $(full_classes_header_jarjar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
 $(full_classes_header_jarjar): $(full_classes_turbine_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
-	@echo Header JarJar: $@
-	$(hide) $(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+	$(call transform-jarjar)
 else
 full_classes_header_jarjar := $(full_classes_turbine_jar)
 endif
@@ -149,8 +148,7 @@
 ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
 $(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
 $(full_classes_jarjar_jar): $(full_classes_combined_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
-	@echo JarJar: $@
-	$(hide) $(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+	$(call transform-jarjar)
 else
 full_classes_jarjar_jar := $(full_classes_combined_jar)
 endif
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index f9abe9b..0f95202 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -113,8 +113,7 @@
 ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
 $(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
 $(full_classes_jarjar_jar): $(full_classes_combined_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
-	@echo JarJar: $@
-	$(hide) $(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+	$(call transform-jarjar)
 else
 full_classes_jarjar_jar := $(full_classes_combined_jar)
 endif
diff --git a/core/java.mk b/core/java.mk
index 5fe8da5..d28c0c4 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -253,8 +253,7 @@
 ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
 $(full_classes_header_jarjar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
 $(full_classes_header_jarjar): $(full_classes_turbine_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
-	@echo Header JarJar: $@
-	$(hide) $(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+	$(call transform-jarjar)
 else
 full_classes_header_jarjar := $(full_classes_turbine_jar)
 endif
@@ -334,8 +333,7 @@
 ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
 $(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
 $(full_classes_jarjar_jar): $(full_classes_processed_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
-	@echo JarJar: $@
-	$(hide) $(JAVA) -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+	$(call transform-jarjar)
 else
 full_classes_jarjar_jar := $(full_classes_processed_jar)
 endif
diff --git a/core/main.mk b/core/main.mk
index fb13093..63fac43 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -41,7 +41,12 @@
 # without changing the command line every time.  Avoids rebuilds
 # when using ninja.
 $(shell mkdir -p $(SOONG_OUT_DIR) && \
-    echo -n $(BUILD_NUMBER) > $(SOONG_OUT_DIR)/build_number.txt)
+    echo -n $(BUILD_NUMBER) > $(SOONG_OUT_DIR)/build_number.tmp; \
+    if ! cmp -s $(SOONG_OUT_DIR)/build_number.tmp $(SOONG_OUT_DIR)/build_number.txt; then \
+        mv $(SOONG_OUT_DIR)/build_number.tmp $(SOONG_OUT_DIR)/build_number.txt; \
+    else \
+        rm $(SOONG_OUT_DIR)/build_number.tmp; \
+    fi)
 BUILD_NUMBER_FILE := $(SOONG_OUT_DIR)/build_number.txt
 .KATI_READONLY := BUILD_NUMBER_FILE
 $(KATI_obsolete_var BUILD_NUMBER,See https://android.googlesource.com/platform/build/+/master/Changes.md#BUILD_NUMBER)
@@ -83,6 +88,8 @@
 -include test/vts/tools/vts-core-tradefed/build/config.mk
 # CSUITE-specific config.
 -include test/app_compat/csuite/tools/build/config.mk
+# CTS-Root-specific config.
+-include test/cts-root/tools/build/config.mk
 
 # Clean rules
 .PHONY: clean-dex-files
@@ -285,9 +292,6 @@
 
 ADDITIONAL_VENDOR_PROPERTIES += \
     ro.vendor.build.security_patch=$(VENDOR_SECURITY_PATCH) \
-    ro.vendor.product.cpu.abilist=$(TARGET_CPU_ABI_LIST) \
-    ro.vendor.product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT) \
-    ro.vendor.product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT) \
     ro.product.board=$(TARGET_BOOTLOADER_BOARD_NAME) \
     ro.board.platform=$(TARGET_BOARD_PLATFORM) \
     ro.hwui.use_vulkan=$(TARGET_USES_VULKAN)
@@ -302,11 +306,6 @@
     ro.build.ab_update=$(AB_OTA_UPDATER)
 endif
 
-ADDITIONAL_ODM_PROPERTIES += \
-    ro.odm.product.cpu.abilist=$(TARGET_CPU_ABI_LIST) \
-    ro.odm.product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT) \
-    ro.odm.product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)
-
 # Set ro.product.vndk.version to know the VNDK version required by product
 # modules. It uses the version in PRODUCT_PRODUCT_VNDK_VERSION. If the value
 # is "current", use PLATFORM_VNDK_VERSION.
@@ -1120,7 +1119,11 @@
 # Expand a list of modules to the modules that they override (if any)
 # $(1): The list of modules.
 define module-overrides
-$(foreach m,$(1),$(PACKAGES.$(m).OVERRIDES) $(EXECUTABLES.$(m).OVERRIDES) $(SHARED_LIBRARIES.$(m).OVERRIDES) $(ETC.$(m).OVERRIDES))
+$(foreach m,$(1),\
+  $(eval _mo_overrides := $(PACKAGES.$(m).OVERRIDES) $(EXECUTABLES.$(m).OVERRIDES) $(SHARED_LIBRARIES.$(m).OVERRIDES) $(ETC.$(m).OVERRIDES))\
+  $(if $(filter $(m),$(_mo_overrides)),\
+    $(error Module $(m) cannot override itself),\
+    $(_mo_overrides)))
 endef
 
 ###########################################################
@@ -1260,8 +1263,10 @@
         $(if $(or $(ALL_MODULES.$(m).PATH),$(call get-modules-for-2nd-arch,TARGET,$(m))),,$(m)))
       $(call maybe-print-list-and-error,$(filter-out $(_allow_list),$(_nonexistent_modules)),\
         $(INTERNAL_PRODUCT) includes non-existent modules in PRODUCT_PACKAGES)
-      $(call maybe-print-list-and-error,$(filter-out $(_nonexistent_modules),$(_allow_list)),\
-        $(INTERNAL_PRODUCT) includes redundant allow list entries for non-existent PRODUCT_PACKAGES)
+      # TODO(b/182105280): Consider re-enabling this check when the ART modules
+      # have been cleaned up from the allowed_list in target/product/generic.mk.
+      #$(call maybe-print-list-and-error,$(filter-out $(_nonexistent_modules),$(_allow_list)),\
+      #  $(INTERNAL_PRODUCT) includes redundant allow list entries for non-existent PRODUCT_PACKAGES)
     endif
 
     # Check to ensure that all modules in PRODUCT_HOST_PACKAGES exist
@@ -1467,9 +1472,6 @@
 .PHONY: ramdisk_test_harness
 ramdisk_test_harness: $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
 
-.PHONY: vendor_ramdisk_debug
-vendor_ramdisk_debug: $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET)
-
 .PHONY: userdataimage
 userdataimage: $(INSTALLED_USERDATAIMAGE_TARGET)
 
@@ -1492,6 +1494,12 @@
 .PHONY: vendorbootimage_debug
 vendorbootimage_debug: $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET)
 
+.PHONY: vendorramdisk
+vendorramdisk: $(INSTALLED_VENDOR_RAMDISK_TARGET)
+
+.PHONY: vendorramdisk_debug
+vendorramdisk_debug: $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET)
+
 .PHONY: productimage
 productimage: $(INSTALLED_PRODUCTIMAGE_TARGET)
 
@@ -1549,8 +1557,9 @@
     $(INSTALLED_BPTIMAGE_TARGET) \
     $(INSTALLED_VENDORIMAGE_TARGET) \
     $(INSTALLED_VENDOR_BOOTIMAGE_TARGET) \
-    $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
     $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
+    $(INSTALLED_VENDOR_RAMDISK_TARGET) \
+    $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
     $(INSTALLED_ODMIMAGE_TARGET) \
     $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
     $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
@@ -1734,8 +1743,9 @@
       $(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK) \
       $(INSTALLED_DEBUG_RAMDISK_TARGET) \
       $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
-      $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
       $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
+      $(INSTALLED_VENDOR_RAMDISK_TARGET) \
+      $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
     )
     $(call dist-for-goals, bootimage_test_harness, \
       $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET) \
@@ -1750,7 +1760,6 @@
   endif
 
   ifeq ($(EMMA_INSTRUMENT),true)
-    $(JACOCO_REPORT_CLASSES_ALL) : $(modules_to_install)
     $(call dist-for-goals, dist_files, $(JACOCO_REPORT_CLASSES_ALL))
   endif
 
diff --git a/core/node_fns.mk b/core/node_fns.mk
index b81d60c..8d20160 100644
--- a/core/node_fns.mk
+++ b/core/node_fns.mk
@@ -195,7 +195,11 @@
   $(call clear-var-list, $(3))
   $(eval LOCAL_PATH := $(patsubst %/,%,$(dir $(2))))
   $(eval MAKEFILE_LIST :=)
+  $(call dump-import-start,$(_include_stack))
+  $(call dump-config-vals,$(2),before)
   $(eval include $(2))
+  $(call dump-import-done,$(_include_stack))
+  $(call dump-config-vals,$(2),after)
   $(eval _included := $(filter-out $(2),$(MAKEFILE_LIST)))
   $(eval MAKEFILE_LIST :=)
   $(eval LOCAL_PATH :=)
@@ -250,6 +254,7 @@
 #       of the default list semantics
 #
 define import-nodes
+$(call dump-phase-start,$(1),$(2),$(3),$(4),build/make/core/node_fns.mk) \
 $(if \
   $(foreach _in,$(2), \
     $(eval _node_import_context := _nic.$(1).[[$(_in)]]) \
@@ -263,5 +268,6 @@
     $(if $(_include_stack),$(eval $(error ASSERTION FAILED: _include_stack \
                 should be empty here: $(_include_stack))),) \
    ) \
-,)
+,) \
+$(call dump-phase-end,build/make/core/node_fns.mk)
 endef
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 89f822b..9678380 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -80,7 +80,7 @@
 ifeq (true,$(is_container))
 # Include shared libraries' notices for "container" types, but not for binaries etc.
 notice_deps := \
-    $(sort \
+    $(strip \
         $(LOCAL_REQUIRED_MODULES) \
         $(LOCAL_STATIC_LIBRARIES) \
         $(LOCAL_WHOLE_STATIC_LIBRARIES) \
@@ -95,7 +95,7 @@
     )
 else
 notice_deps := \
-    $(sort \
+    $(strip \
         $(LOCAL_REQUIRED_MODULES) \
         $(LOCAL_STATIC_LIBRARIES) \
         $(LOCAL_WHOLE_STATIC_LIBRARIES) \
@@ -106,24 +106,24 @@
     )
 endif
 ifeq ($(LOCAL_IS_HOST_MODULE),true)
-notice_deps := $(sort $(notice_deps) $(LOCAL_HOST_REQUIRED_MODULES))
+notice_deps := $(strip $(notice_deps) $(LOCAL_HOST_REQUIRED_MODULES))
 else
-notice_deps := $(sort $(notice_deps) $(LOCAL_TARGET_REQUIRED_MODULES))
+notice_deps := $(strip $(notice_deps) $(LOCAL_TARGET_REQUIRED_MODULES))
 endif
 
 ifdef my_register_name
 ALL_MODULES.$(my_register_name).LICENSE_PACKAGE_NAME := $(strip $(license_package_name))
-ALL_MODULES.$(my_register_name).LICENSE_KINDS := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_KINDS) $(license_kinds))
-ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS) $(license_conditions))
-ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP := $(sort $(ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP) $(install_map))
-ALL_MODULES.$(my_register_name).NOTICE_DEPS := $(sort $(ALL_MODULES.$(my_register_name).NOTICE_DEPS) $(notice_deps))
-ALL_MODULES.$(my_register_name).IS_CONTAINER := $(sort $(ALL_MODULES.$(my_register_name).IS_CONTAINER) $(is_container))
+ALL_MODULES.$(my_register_name).LICENSE_KINDS := $(ALL_MODULES.$(my_register_name).LICENSE_KINDS) $(license_kinds)
+ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS := $(ALL_MODULES.$(my_register_name).LICENSE_CONDITIONS) $(license_conditions)
+ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP := $(ALL_MODULES.$(my_register_name).LICENSE_INSTALL_MAP) $(install_map)
+ALL_MODULES.$(my_register_name).NOTICE_DEPS := $(ALL_MODULES.$(my_register_name).NOTICE_DEPS) $(notice_deps)
+ALL_MODULES.$(my_register_name).IS_CONTAINER := $(strip $(filter-out false,$(ALL_MODULES.$(my_register_name).IS_CONTAINER) $(is_container)))
 endif
 
 ifdef notice_file
 
 ifdef my_register_name
-ALL_MODULES.$(my_register_name).NOTICES := $(sort $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file))
+ALL_MODULES.$(my_register_name).NOTICES := $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file)
 endif
 
 # This relies on the name of the directory in PRODUCT_OUT matching where
@@ -180,9 +180,10 @@
 installed_notice_file := $($(my_prefix)OUT_NOTICE_FILES)/src/$(module_installed_filename).txt
 
 ifdef my_register_name
-ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE := $(installed_notice_file)
-endif
-
+ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE := $(ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE) $(installed_notice_file)
+ALL_MODULES.$(my_register_name).MODULE_INSTALLED_FILENAMES := $(ALL_MODULES.$(my_register_name).MODULE_INSTALLED_FILENAMES) $(module_installed_filename)
+INSTALLED_NOTICE_FILES.$(installed_notice_file).MODULE := $(my_register_name)
+else
 $(installed_notice_file): PRIVATE_INSTALLED_MODULE := $(module_installed_filename)
 $(installed_notice_file) : PRIVATE_NOTICES := $(notice_file)
 
@@ -190,6 +191,7 @@
 	@echo Notice file: $< -- $@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) awk 'FNR==1 && NR > 1 {print "\n"} {print}' $(PRIVATE_NOTICES) > $@
+endif
 
 ifdef LOCAL_INSTALLED_MODULE
 # Make LOCAL_INSTALLED_MODULE depend on NOTICE files if they exist
diff --git a/core/package_internal.mk b/core/package_internal.mk
index a97e401..346ca24 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -101,7 +101,6 @@
 enforce_rro_enabled :=
 ifneq (,$(filter *, $(PRODUCT_ENFORCE_RRO_TARGETS)))
   # * means all system and system_ext APKs, so enable conditionally based on module path.
-  # Note that modules in PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS are excluded even if it is '*'
 
   # Note that base_rules.mk has not yet been included, so it's likely that only
   # one of LOCAL_MODULE_PATH and the LOCAL_X_MODULE flags has been set.
@@ -120,12 +119,6 @@
   enforce_rro_enabled := true
 endif
 
-# TODO(b/150820813) Some modules depend on static overlay, remove this after eliminating the dependency.
-ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS)))
-  enforce_rro_enabled :=
-endif
-
-
 product_package_overlays := $(strip \
     $(wildcard $(foreach dir, $(PRODUCT_PACKAGE_OVERLAYS), \
       $(addprefix $(dir)/, $(LOCAL_RESOURCE_DIR)))))
@@ -552,6 +545,10 @@
 ifeq (true, $(LOCAL_UNCOMPRESS_DEX))
 $(LOCAL_BUILT_MODULE) : $(ZIP2ZIP)
 endif
+ifeq ($(full_classes_jar),)
+  # We don't build jar, need to add the Java resources here.
+  $(LOCAL_BUILT_MODULE): $(java_resource_sources)
+endif
 $(LOCAL_BUILT_MODULE): PRIVATE_USE_EMBEDDED_NATIVE_LIBS := $(LOCAL_USE_EMBEDDED_NATIVE_LIBS)
 $(LOCAL_BUILT_MODULE):
 	@echo "target Package: $(PRIVATE_MODULE) ($@)"
@@ -603,6 +600,8 @@
 else
   $(my_bundle_module): PRIVATE_DEX_FILE :=
   $(my_bundle_module): PRIVATE_SOURCE_ARCHIVE :=
+  # We don't build jar, need to add the Java resources here.
+  $(my_bundle_module): $(java_resource_sources)
 endif # full_classes_jar
 
 $(my_bundle_module): $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
diff --git a/core/product.mk b/core/product.mk
index f5e81e7..19e760b 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -195,9 +195,6 @@
 # Package list to apply enforcing RRO.
 _product_list_vars += PRODUCT_ENFORCE_RRO_TARGETS
 
-# Packages to skip auto-generating RROs for when PRODUCT_ENFORCE_RRO_TARGETS is set to *.
-_product_list_vars += PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS
-
 _product_list_vars += PRODUCT_SDK_ATREE_FILES
 _product_list_vars += PRODUCT_SDK_ADDON_NAME
 _product_list_vars += PRODUCT_SDK_ADDON_COPY_FILES
@@ -224,10 +221,10 @@
 # instead of PRODUCT_BOOT_JARS, so that device-specific jars go after common jars.
 _product_list_vars += PRODUCT_BOOT_JARS_EXTRA
 
-_product_list_vars += PRODUCT_SUPPORTS_BOOT_SIGNER
-_product_list_vars += PRODUCT_SUPPORTS_VBOOT
-_product_list_vars += PRODUCT_SUPPORTS_VERITY
-_product_list_vars += PRODUCT_SUPPORTS_VERITY_FEC
+_product_single_value_vars += PRODUCT_SUPPORTS_BOOT_SIGNER
+_product_single_value_vars += PRODUCT_SUPPORTS_VBOOT
+_product_single_value_vars += PRODUCT_SUPPORTS_VERITY
+_product_single_value_vars += PRODUCT_SUPPORTS_VERITY_FEC
 _product_list_vars += PRODUCT_SYSTEM_SERVER_APPS
 _product_list_vars += PRODUCT_SYSTEM_SERVER_JARS
 # List of system_server jars delivered via apex. Format = <apex name>:<jar name>.
@@ -363,6 +360,11 @@
 _product_list_vars += PRODUCT_PACKAGE_NAME_OVERRIDES
 _product_list_vars += PRODUCT_CERTIFICATE_OVERRIDES
 
+# A list of <overridden-apex>:<override-apex> pairs that specifies APEX module
+# overrides to be applied to the APEX names in the boot jar variables
+# (PRODUCT_BOOT_JARS, PRODUCT_UPDATABLE_BOOT_JARS etc).
+_product_list_vars += PRODUCT_BOOT_JAR_MODULE_OVERRIDES
+
 # Controls for whether different partitions are built for the current product.
 _product_single_value_vars += PRODUCT_BUILD_SYSTEM_IMAGE
 _product_single_value_vars += PRODUCT_BUILD_SYSTEM_OTHER_IMAGE
@@ -389,6 +391,9 @@
 # If set, device uses virtual A/B.
 _product_single_value_vars += PRODUCT_VIRTUAL_AB_OTA
 
+# If set, device uses virtual A/B Compression.
+_product_single_value_vars += PRODUCT_VIRTUAL_AB_COMPRESSION
+
 # If set, device retrofits virtual A/B.
 _product_single_value_vars += PRODUCT_VIRTUAL_AB_OTA_RETROFIT
 
@@ -457,7 +462,9 @@
   $(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
   $(eval inherit_var := PRODUCTS.$(current_mk).INHERITS_FROM) \
   $(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
-  $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk)))
+  $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk))) \
+  $(call dump-inherit,$(strip $(word 1,$(_include_stack))),$(1)) \
+  $(call dump-config-vals,$(current_mk),inherit)
 endef
 
 # Specifies a number of path prefixes, relative to PRODUCT_OUT, where the
@@ -604,6 +611,8 @@
 # to a shorthand that is more convenient to read from elsewhere.
 #
 define strip-product-vars
+$(call dump-phase-start,PRODUCT-EXPAND,,$(_product_var_list),$(_product_single_value_vars), \
+		build/make/core/product.mk) \
 $(foreach v,\
   $(_product_var_list) \
     PRODUCT_ENFORCE_PACKAGES_EXIST \
@@ -611,7 +620,8 @@
   $(eval $(v) := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).$(v)))) \
   $(eval get-product-var = $$(if $$(filter $$(1),$$(INTERNAL_PRODUCT)),$$($$(2)),$$(PRODUCTS.$$(strip $$(1)).$$(2)))) \
   $(KATI_obsolete_var PRODUCTS.$(INTERNAL_PRODUCT).$(v),Use $(v) instead) \
-)
+) \
+$(call dump-phase-end,build/make/core/product.mk)
 endef
 
 define add-to-product-copy-files-if-exists
diff --git a/core/product_config.mk b/core/product_config.mk
index 7b72b5e..d703ee3 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -146,6 +146,11 @@
 endif
 endif
 
+ifneq ($(ALLOW_RULES_IN_PRODUCT_CONFIG),)
+_product_config_saved_KATI_ALLOW_RULES := $(.KATI_ALLOW_RULES)
+.KATI_ALLOW_RULES := $(ALLOW_RULES_IN_PRODUCT_CONFIG)
+endif
+
 ifeq ($(load_all_product_makefiles),true)
 # Import all product makefiles.
 $(call import-products, $(all_product_makefiles))
@@ -163,12 +168,19 @@
 # Quick check
 $(check-all-products)
 
+ifeq ($(SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK),)
 # Import all the products that have made artifact path requirements, so that we can verify
 # the artifacts they produce.
 # These are imported after check-all-products because some of them might not be real products.
 $(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
   $(if $(filter-out $(makefile),$(PRODUCTS)),$(eval $(call import-products,$(makefile))))\
 )
+endif
+
+ifneq ($(ALLOW_RULES_IN_PRODUCT_CONFIG),)
+.KATI_ALLOW_RULES := $(_product_config_saved_KATI_ALLOW_RULES)
+_product_config_saved_KATI_ALLOW_RULES :=
+endif
 
 ifneq ($(filter dump-products, $(MAKECMDGOALS)),)
 $(dump-products)
@@ -181,14 +193,16 @@
 ifneq ($(current_product_makefile),$(INTERNAL_PRODUCT))
 $(error PRODUCT_NAME inconsistent in $(current_product_makefile) and $(INTERNAL_PRODUCT))
 endif
-current_product_makefile :=
-all_product_makefiles :=
-all_product_configs :=
+
 
 ############################################################################
 # Strip and assign the PRODUCT_ variables.
 $(call strip-product-vars)
 
+current_product_makefile :=
+all_product_makefiles :=
+all_product_configs :=
+
 #############################################################################
 # Quick check and assign default values
 
@@ -224,6 +238,19 @@
 PRODUCT_BOOT_JARS := $(foreach pair,$(PRODUCT_BOOT_JARS), \
   $(if $(findstring :,$(pair)),,platform:)$(pair))
 
+# Replaces references to overridden boot jar modules in a boot jars variable.
+# $(1): Name of a boot jars variable with <apex>:<jar> pairs.
+define replace-boot-jar-module-overrides
+  $(foreach pair,$(PRODUCT_BOOT_JAR_MODULE_OVERRIDES),\
+    $(eval _rbjmo_from := $(call word-colon,1,$(pair)))\
+    $(eval _rbjmo_to := $(call word-colon,2,$(pair)))\
+    $(eval $(1) := $(patsubst $(_rbjmo_from):%,$(_rbjmo_to):%,$($(1)))))
+endef
+
+$(call replace-boot-jar-module-overrides,PRODUCT_BOOT_JARS)
+$(call replace-boot-jar-module-overrides,PRODUCT_UPDATABLE_BOOT_JARS)
+$(call replace-boot-jar-module-overrides,ART_APEX_JARS)
+
 # The extra system server jars must be appended at the end after common system server jars.
 PRODUCT_SYSTEM_SERVER_JARS += $(PRODUCT_SYSTEM_SERVER_JARS_EXTRA)
 
@@ -401,6 +428,11 @@
 $(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
 $(KATI_obsolete_var PRODUCT_USE_PRODUCT_VNDK_OVERRIDE,Use PRODUCT_PRODUCT_VNDK_VERSION instead)
 
+ifdef PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS
+    $(error PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS is deprecated, consider using RRO for \
+      $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS))
+endif
+
 define product-overrides-config
 $$(foreach rule,$$(PRODUCT_$(1)_OVERRIDES),\
     $$(if $$(filter 2,$$(words $$(subst :,$$(space),$$(rule)))),,\
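
To make the effect of replace-boot-jar-module-overrides above concrete, a hedged sketch of the same prefix rewrite (the APEX and jar names are hypothetical):

# Each "<from-apex>:<to-apex>" pair rewrites the "<apex>:" prefix of matching
# "<apex>:<jar>" entries, mirroring the patsubst call in the macro above.
overrides = ['com.example.art:com.example.art.debug']
boot_jars = ['com.example.art:core-lib', 'platform:framework']

for pair in overrides:
    src, dst = pair.split(':')
    boot_jars = [dst + jar[len(src):] if jar.startswith(src + ':') else jar
                 for jar in boot_jars]

print(boot_jars)
# -> ['com.example.art.debug:core-lib', 'platform:framework']
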
diff --git a/core/rbe.mk b/core/rbe.mk
index 91606d4..19c0e42 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -34,6 +34,12 @@
     cxx_compare := false
   endif
 
+  ifdef RBE_CXX_COMPARE
+    cxx_compare := $(RBE_CXX_COMPARE)
+  else
+    cxx_compare := "false"
+  endif
+
   ifdef RBE_JAVAC_EXEC_STRATEGY
     javac_exec_strategy := $(RBE_JAVAC_EXEC_STRATEGY)
   else
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 48ab0eb..9fdf7b8 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -80,7 +80,6 @@
 $(call add_json_list, DeviceResourceOverlays,            $(DEVICE_PACKAGE_OVERLAYS))
 $(call add_json_list, ProductResourceOverlays,           $(PRODUCT_PACKAGE_OVERLAYS))
 $(call add_json_list, EnforceRROTargets,                 $(PRODUCT_ENFORCE_RRO_TARGETS))
-$(call add_json_list, EnforceRROExemptedTargets,         $(PRODUCT_ENFORCE_RRO_EXEMPTED_TARGETS))
 $(call add_json_list, EnforceRROExcludedOverlays,        $(PRODUCT_ENFORCE_RRO_EXCLUDED_OVERLAYS))
 
 $(call add_json_str,  AAPTCharacteristics,               $(TARGET_AAPT_CHARACTERISTICS))
@@ -103,6 +102,10 @@
 $(call add_json_list, CFIIncludePaths,                   $(CFI_INCLUDE_PATHS) $(PRODUCT_CFI_INCLUDE_PATHS))
 $(call add_json_list, IntegerOverflowExcludePaths,       $(INTEGER_OVERFLOW_EXCLUDE_PATHS) $(PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS))
 
+$(call add_json_list, MemtagHeapExcludePaths,            $(MEMTAG_HEAP_EXCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_EXCLUDE_PATHS))
+$(call add_json_list, MemtagHeapAsyncIncludePaths,       $(MEMTAG_HEAP_ASYNC_INCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS))
+$(call add_json_list, MemtagHeapSyncIncludePaths,        $(MEMTAG_HEAP_SYNC_INCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_SYNC_INCLUDE_PATHS))
+
 $(call add_json_bool, DisableScudo,                      $(filter true,$(PRODUCT_DISABLE_SCUDO)))
 
 $(call add_json_bool, ClangTidy,                         $(filter 1 true,$(WITH_TIDY)))
@@ -143,13 +146,24 @@
 $(call add_json_bool, VndkUseCoreVariant,                $(TARGET_VNDK_USE_CORE_VARIANT))
 $(call add_json_bool, VndkSnapshotBuildArtifacts,        $(VNDK_SNAPSHOT_BUILD_ARTIFACTS))
 
+$(call add_json_bool, DirectedVendorSnapshot,            $(DIRECTED_VENDOR_SNAPSHOT))
+$(call add_json_map,  VendorSnapshotModules)
+$(foreach module,$(VENDOR_SNAPSHOT_MODULES),\
+  $(call add_json_bool,$(module),true))
+$(call end_json_map)
+
+$(call add_json_bool, DirectedRecoverySnapshot,          $(DIRECTED_RECOVERY_SNAPSHOT))
+$(call add_json_map,  RecoverySnapshotModules)
+$(foreach module,$(RECOVERY_SNAPSHOT_MODULES),\
+  $(call add_json_bool,$(module),true))
+$(call end_json_map)
+
 $(call add_json_bool, Treble_linker_namespaces,          $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
 $(call add_json_bool, Enforce_vintf_manifest,            $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
 
 $(call add_json_bool, Check_elf_files,                   $(filter true,$(PRODUCT_CHECK_ELF_FILES)))
 
 $(call add_json_bool, Uml,                               $(filter true,$(TARGET_USER_MODE_LINUX)))
-$(call add_json_bool, Use_lmkd_stats_log,                $(filter true,$(TARGET_LMKD_STATS_LOG)))
 $(call add_json_str,  VendorPath,                        $(TARGET_COPY_OUT_VENDOR))
 $(call add_json_str,  OdmPath,                           $(TARGET_COPY_OUT_ODM))
 $(call add_json_str,  VendorDlkmPath,                    $(TARGET_COPY_OUT_VENDOR_DLKM))
@@ -227,6 +241,11 @@
 $(call add_json_list, BoardKernelModuleInterfaceVersions, $(BOARD_KERNEL_MODULE_INTERFACE_VERSIONS))
 
 $(call add_json_bool, BoardMoveRecoveryResourcesToVendorBoot, $(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
+$(call add_json_str,  PrebuiltHiddenApiDir, $(BOARD_PREBUILT_HIDDENAPI_DIR))
+
+$(call add_json_str,  ShippingApiLevel, $(PRODUCT_SHIPPING_API_LEVEL))
+
+$(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE))
 
 $(call json_end)
 
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index 5444d96..c600178 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -5,6 +5,7 @@
 # LOCAL_SOONG_HEADER_JAR
 # LOCAL_SOONG_DEX_JAR
 # LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
+# LOCAL_SOONG_DEXPREOPT_CONFIG
 
 ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
   $(call pretty-error,soong_java_prebuilt.mk may only be used from Soong)
@@ -146,6 +147,12 @@
   ALL_MODULES.$(my_register_name).AAR := $(LOCAL_SOONG_AAR)
 endif
 
+# Copy dexpreopt.config files from Soong libraries to the location where Make
+# modules can find them.
+ifdef LOCAL_SOONG_DEXPREOPT_CONFIG
+  $(eval $(call copy-one-file,$(LOCAL_SOONG_DEXPREOPT_CONFIG), $(call local-intermediates-dir,)/dexpreopt.config))
+endif
+
 javac-check : $(full_classes_jar)
 javac-check-$(LOCAL_MODULE) : $(full_classes_jar)
 .PHONY: javac-check-$(LOCAL_MODULE)
diff --git a/core/sysprop.mk b/core/sysprop.mk
index 1540736..359d3d2 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -47,6 +47,11 @@
         echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
         echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
     )\
+    $(if $(filter system vendor odm,$(1)),\
+        echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST) " >> $(2);\
+        echo "ro.$(1).product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT)" >> $(2);\
+        echo "ro.$(1).product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+    )\
     echo "ro.$(1).build.date=`$(DATE_FROM_FILE)`" >> $(2);\
     echo "ro.$(1).build.date.utc=`$(DATE_FROM_FILE) +%s`" >> $(2);\
     echo "ro.$(1).build.fingerprint=$(BUILD_FINGERPRINT_FROM_FILE)" >> $(2);\
@@ -326,7 +331,7 @@
 $(android_info_prop): $(INSTALLED_ANDROID_INFO_TXT_TARGET)
 	cat $< | grep 'require version-' | sed -e 's/require version-/ro.build.expect./g' > $@
 
-_prop_files_ += $(android_info_pro)
+_prop_files_ += $(android_info_prop)
 
 ifdef property_overrides_split_enabled
 # Order matters here. When there are duplicates, the last one wins.
diff --git a/core/tasks/cts_root.mk b/core/tasks/cts_root.mk
new file mode 100644
index 0000000..b618121
--- /dev/null
+++ b/core/tasks/cts_root.mk
@@ -0,0 +1,25 @@
+# Copyright (C) 2021 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ifneq ($(wildcard test/cts-root/README.md),)
+test_suite_name := cts_root
+test_suite_tradefed := cts-root-tradefed
+test_suite_readme := test/cts-root/README.md
+
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
+
+.PHONY: cts_root
+cts_root: $(compatibility_zip)
+$(call dist-for-goals, cts_root, $(compatibility_zip))
+endif
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index cf32d65..4bbfd39 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -20,6 +20,7 @@
 			'"srcjars": [$(foreach w,$(sort $(ALL_MODULES.$(m).SRCJARS)),"$(w)", )], ' \
 			'"classes_jar": [$(foreach w,$(sort $(ALL_MODULES.$(m).CLASSES_JAR)),"$(w)", )], ' \
 			'"test_mainline_modules": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_MAINLINE_MODULES)),"$(w)", )], ' \
+			'"is_unit_test": "$(ALL_MODULES.$(m).IS_UNIT_TEST)", ' \
 			'},\n' \
 	 ) | sed -e 's/, *\]/]/g' -e 's/, *\}/ }/g' -e '$$s/,$$//' >> $@
 	$(hide) echo '}' >> $@
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 2b43f0f..20a1694 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -19,6 +19,8 @@
 
 include $(CLEAR_VARS)
 LOCAL_MODULE := $(my_package_name)
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
+LOCAL_LICENSE_CONDITIONS := notice
 LOCAL_MODULE_CLASS := PACKAGING
 LOCAL_MODULE_STEM := $(my_package_name).zip
 LOCAL_UNINSTALLABLE_MODULE := true
diff --git a/core/verify_uses_libraries.sh b/core/verify_uses_libraries.sh
index dde0447..1bd0a2c 100755
--- a/core/verify_uses_libraries.sh
+++ b/core/verify_uses_libraries.sh
@@ -21,6 +21,7 @@
 
 set -e
 local_apk=$1
+status_file=$2
 badging=$(${aapt_binary} dump badging "${local_apk}")
 export sdk_version=$(echo "${badging}" | grep "sdkVersion" | sed -n "s/sdkVersion:'\(.*\)'/\1/p")
 # Export target_sdk_version to the caller.
@@ -28,20 +29,28 @@
 uses_libraries=$(echo "${badging}" | grep "uses-library" | sed -n "s/uses-library:'\(.*\)'/\1/p")
 optional_uses_libraries=$(echo "${badging}" | grep "uses-library-not-required" | sed -n "s/uses-library-not-required:'\(.*\)'/\1/p")
 
+errmsg=
+
 # Verify that the uses libraries match exactly.
 # Currently we validate the ordering of the libraries since it matters for resolution.
 single_line_libs=$(echo "${uses_libraries}" | tr '\n' ' ' | awk '{$1=$1}1')
 if [[ "${single_line_libs}" != "${uses_library_names}" ]]; then
-  echo "LOCAL_USES_LIBRARIES (${uses_library_names})" \
-       "do not match (${single_line_libs}) in manifest for ${local_apk}"
-  exit 1
+  errmsg="LOCAL_USES_LIBRARIES (${uses_library_names}) do not match (${single_line_libs}) in manifest for ${local_apk}"
 fi
 
 # Verify that the optional uses libraries match exactly.
 single_line_optional_libs=$(echo "${optional_uses_libraries}" | tr '\n' ' ' | awk '{$1=$1}1')
 if [[ "${single_line_optional_libs}" != "${optional_uses_library_names}" ]]; then
-  echo "LOCAL_OPTIONAL_USES_LIBRARIES (${optional_uses_library_names}) " \
-       "do not match (${single_line_optional_libs}) in manifest for ${local_apk}"
-  exit 1
+  errmsg="LOCAL_OPTIONAL_USES_LIBRARIES (${optional_uses_library_names}) do not match (${single_line_optional_libs}) in manifest for ${local_apk}"
 fi
 
+if [[ ! -z "${errmsg}" ]]; then
+  echo "${errmsg}" > "${status_file}"
+  if [[ "${relax_check}" != true ]]; then
+    # fail immediately
+    echo "${errmsg}"
+    exit 1
+  fi
+else
+  touch "${status_file}"
+fi
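
The script's contract changes here: instead of exiting on the first mismatch, it always records the result in the new status file (empty on success) and only exits non-zero when relax_check is not "true". The sketch below restates that deferred-failure pattern in Python; the relax_check flag and the status-file path simply mirror the shell variables and are not a separate API.

# Sketch of the deferred-failure pattern used by verify_uses_libraries.sh:
# always write the status file, only abort when the check is not relaxed.
import sys

def report(errmsg, status_file, relax_check):
    with open(status_file, "w") as f:
        f.write(errmsg)  # an empty file means the check passed
    if errmsg and relax_check != "true":
        print(errmsg)
        sys.exit(1)  # fail immediately, as in strict mode

# Example: a mismatch under relaxed checking is recorded but not fatal.
report("LOCAL_USES_LIBRARIES (...) do not match (...)", "/tmp/status", "true")
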
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index fe90165..c9e3e80 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -240,7 +240,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-      PLATFORM_SECURITY_PATCH := 2021-01-05
+      PLATFORM_SECURITY_PATCH := 2021-03-05
 endif
 .KATI_READONLY := PLATFORM_SECURITY_PATCH
 
diff --git a/envsetup.sh b/envsetup.sh
index 8fa608b..58fcd3b 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -27,13 +27,17 @@
 - mangrep:    Greps on all local AndroidManifest.xml files.
 - mgrep:      Greps on all local Makefiles and *.bp files.
 - owngrep:    Greps on all local OWNERS files.
+- rsgrep:     Greps on all local Rust files.
 - sepgrep:    Greps on all local sepolicy files.
 - sgrep:      Greps on all local source files.
 - godir:      Go to the directory containing a file.
 - allmod:     List all modules.
 - gomod:      Go to the directory containing a module.
 - pathmod:    Get the directory containing a module.
-- refreshmod: Refresh list of modules for allmod/gomod/pathmod.
+- outmod:     Gets the location of a module's installed outputs with a certain extension.
+- dirmods:    Gets the modules defined in a given directory.
+- installmod: Adb installs a module's built APK.
+- refreshmod: Refresh list of modules for allmod/gomod/pathmod/outmod/installmod.
 - syswrite:   Remount partitions (e.g. system.img) as writable, rebooting if necessary.
 
 Environment options:
@@ -411,7 +415,10 @@
     fi
     complete -F _lunch lunch
 
+    complete -F _complete_android_module_names pathmod
     complete -F _complete_android_module_names gomod
+    complete -F _complete_android_module_names outmod
+    complete -F _complete_android_module_names installmod
     complete -F _complete_android_module_names m
 }
 
@@ -1032,6 +1039,12 @@
         -exec grep --color -n "$@" {} +
 }
 
+function rsgrep()
+{
+    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.rs" \
+        -exec grep --color -n "$@" {} +
+}
+
 function cgrep()
 {
     find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f \( -name '*.c' -o -name '*.cc' -o -name '*.cpp' -o -name '*.h' -o -name '*.hpp' \) \
@@ -1378,9 +1391,8 @@
         > $ANDROID_PRODUCT_OUT/module-info.json.build.log 2>&1
 }
 
-# List all modules for the current device, as cached in module-info.json. If any build change is
-# made and it should be reflected in the output, you should run 'refreshmod' first.
-function allmod() {
+# Verifies that module-info.json exists, creating it if it doesn't.
+function verifymodinfo() {
     if [ ! "$ANDROID_PRODUCT_OUT" ]; then
         echo "No ANDROID_PRODUCT_OUT. Try running 'lunch' first." >&2
         return 1
@@ -1390,27 +1402,26 @@
         echo "Could not find module-info.json. It will only be built once, and it can be updated with 'refreshmod'" >&2
         refreshmod || return 1
     fi
+}
+
+# List all modules for the current device, as cached in module-info.json. If any build change is
+# made and it should be reflected in the output, you should run 'refreshmod' first.
+function allmod() {
+    verifymodinfo || return 1
 
     python -c "import json; print('\n'.join(sorted(json.load(open('$ANDROID_PRODUCT_OUT/module-info.json')).keys())))"
 }
 
-# Get the path of a specific module in the android tree, as cached in module-info.json. If any build change
-# is made, and it should be reflected in the output, you should run 'refreshmod' first.
+# Get the path of a specific module in the android tree, as cached in module-info.json.
+# If any build change is made, and it should be reflected in the output, you should run
+# 'refreshmod' first.  Note: This is the inverse of dirmods.
 function pathmod() {
-    if [ ! "$ANDROID_PRODUCT_OUT" ]; then
-        echo "No ANDROID_PRODUCT_OUT. Try running 'lunch' first." >&2
-        return 1
-    fi
-
     if [[ $# -ne 1 ]]; then
         echo "usage: pathmod <module>" >&2
         return 1
     fi
 
-    if [ ! -f "$ANDROID_PRODUCT_OUT/module-info.json" ]; then
-        echo "Could not find module-info.json. It will only be built once, and it can be updated with 'refreshmod'" >&2
-        refreshmod || return 1
-    fi
+    verifymodinfo || return 1
 
     local relpath=$(python -c "import json, os
 module = '$1'
@@ -1427,6 +1438,36 @@
     fi
 }
 
+# Get the modules defined in a given directory, as cached in module-info.json.
+# If any build change is made, and it should be reflected in the output, you should run
+# 'refreshmod' first.  Note: This is the inverse of pathmod.
+function dirmods() {
+    if [[ $# -ne 1 ]]; then
+        echo "usage: dirmods <path>" >&2
+        return 1
+    fi
+
+    verifymodinfo || return 1
+
+    python -c "import json, os
+dir = '$1'
+while dir.endswith('/'):
+    dir = dir[:-1]
+prefix = dir + '/'
+module_info = json.load(open('$ANDROID_PRODUCT_OUT/module-info.json'))
+results = set()
+for m in module_info.values():
+    for path in m.get(u'path', []):
+        if path == dir or path.startswith(prefix):
+            name = m.get(u'module_name')
+            if name:
+                results.add(name)
+for name in sorted(results):
+    print(name)
+"
+}
+
+
 # Go to a specific module in the android tree, as cached in module-info.json. If any build change
 # is made, and it should be reflected in the output, you should run 'refreshmod' first.
 function gomod() {
@@ -1442,6 +1483,59 @@
     cd $path
 }
 
+# Gets the list of a module's installed outputs, as cached in module-info.json.
+# If any build change is made, and it should be reflected in the output, you should run 'refreshmod' first.
+function outmod() {
+    if [[ $# -ne 1 ]]; then
+        echo "usage: outmod <module>" >&2
+        return 1
+    fi
+
+    verifymodinfo || return 1
+
+    local relpath
+    relpath=$(python -c "import json, os
+module = '$1'
+module_info = json.load(open('$ANDROID_PRODUCT_OUT/module-info.json'))
+if module not in module_info:
+    exit(1)
+for output in module_info[module]['installed']:
+    print(os.path.join('$ANDROID_BUILD_TOP', output))" 2>/dev/null)
+
+    if [ $? -ne 0 ]; then
+        echo "Could not find module '$1' (try 'refreshmod' if there have been build changes?)" >&2
+        return 1
+    elif [ ! -z "$relpath" ]; then
+        echo "$relpath"
+    fi
+}
+
+# adb install a module's apk, as cached in module-info.json. If any build change
+# is made, and it should be reflected in the output, you should run 'refreshmod' first.
+# Usage: installmod [adb install arguments] <module>
+# For example: installmod -r Dialer -> adb install -r /path/to/Dialer.apk
+function installmod() {
+    if [[ $# -eq 0 ]]; then
+        echo "usage: installmod [adb install arguments] <module>" >&2
+        return 1
+    fi
+
+    local _path
+    _path=$(outmod ${@:$#:1})
+    if [ $? -ne 0 ]; then
+        return 1
+    fi
+
+    _path=$(echo "$_path" | grep -E \\.apk$ | head -n 1)
+    if [ -z "$_path" ]; then
+        echo "Module '$1' does not produce a file ending with .apk (try 'refreshmod' if there have been build changes?)" >&2
+        return 1
+    fi
+    local length=$(( $# - 1 ))
+    echo adb install ${@:1:$length} $_path
+    adb install ${@:1:$length} $_path
+}
+
 function _complete_android_module_names() {
     local word=${COMP_WORDS[COMP_CWORD]}
     COMPREPLY=( $(allmod | grep -E "^$word") )
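
installmod treats its last argument as the module name and forwards everything before it to adb install, which is what the ${@:$#:1} and ${@:1:$length} expansions above do. A small Python restatement of that argument split, using a hypothetical argument list:

# Sketch of installmod's argument handling: the last argument is the module,
# the rest are passed through to `adb install`. Values are hypothetical.
def split_installmod_args(args):
    if not args:
        raise ValueError("usage: installmod [adb install arguments] <module>")
    return args[:-1], args[-1]

adb_args, module = split_installmod_args(["-r", "Dialer"])
print(adb_args, module)  # ['-r'] Dialer
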
diff --git a/help.sh b/help.sh
index 4af5154..bdb078f 100755
--- a/help.sh
+++ b/help.sh
@@ -12,11 +12,15 @@
 source build/envsetup.sh    # Add "lunch" (and other utilities and variables)
                             # to the shell environment.
 lunch [<product>-<variant>] # Choose the device to target.
-m -j [<goals>]              # Execute the configured build.
+m [<goals>]                 # Execute the configured build.
 
 Usage of "m" imitates usage of the program "make".
 See '"${SCRIPT_DIR}"'/Usage.txt for more info about build usage and concepts.
 
+The parallelism of the build can be set with a -jN argument to "m".  If you
+don't provide a -j argument, the build system automatically selects a parallel
+task count that it thinks is optimal for your system.
+
 Common goals are:
 
     clean                   (aka clobber) equivalent to rm -rf out/
diff --git a/target/board/Android.mk b/target/board/Android.mk
index 9edc85c..4dd6b17 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -35,6 +35,8 @@
 # $(DEVICE_MANIFEST_FILE) can be a list of files
 include $(CLEAR_VARS)
 LOCAL_MODULE        := vendor_manifest.xml
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution
+LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice
 LOCAL_MODULE_STEM   := manifest.xml
 LOCAL_MODULE_CLASS  := ETC
 LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)/etc/vintf
@@ -65,6 +67,8 @@
 my_fragment_files := $$($$(my_fragment_files_var))
 include $$(CLEAR_VARS)
 LOCAL_MODULE := vendor_manifest_$(1).xml
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution
+LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice
 LOCAL_MODULE_STEM := manifest_$(1).xml
 LOCAL_MODULE_CLASS := ETC
 LOCAL_MODULE_PATH   := $(TARGET_OUT_VENDOR)/etc/vintf
@@ -94,6 +98,8 @@
 # ODM_MANIFEST_FILES is a list of files that is combined and installed as the default ODM manifest.
 include $(CLEAR_VARS)
 LOCAL_MODULE := odm_manifest.xml
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution
+LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice
 LOCAL_MODULE_STEM := manifest.xml
 LOCAL_MODULE_CLASS := ETC
 LOCAL_MODULE_RELATIVE_PATH := vintf
@@ -124,6 +130,8 @@
 my_fragment_files := $$($$(my_fragment_files_var))
 include $$(CLEAR_VARS)
 LOCAL_MODULE := odm_manifest_$(1).xml
+LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0 legacy_not_a_contribution
+LOCAL_LICENSE_CONDITIONS := by_exception_only not_allowed notice
 LOCAL_MODULE_STEM := manifest_$(1).xml
 LOCAL_MODULE_CLASS := ETC
 LOCAL_MODULE_RELATIVE_PATH := vintf
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index e34dc23..a2150ad 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -49,6 +49,10 @@
 BOARD_GSI_DYNAMIC_PARTITIONS_SIZE := 3221225472
 endif
 
+# TODO(b/123695868, b/146149698):
+#     This flag is set by mainline but isn't desired for GSI
+BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR :=
+
 # Enable chain partition for boot, mainly for GKI images.
 BOARD_AVB_BOOT_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
 BOARD_AVB_BOOT_ALGORITHM := SHA256_RSA2048
diff --git a/target/board/BoardConfigMainlineCommon.mk b/target/board/BoardConfigMainlineCommon.mk
index bf015e5..00f6e5b 100644
--- a/target/board/BoardConfigMainlineCommon.mk
+++ b/target/board/BoardConfigMainlineCommon.mk
@@ -19,7 +19,8 @@
 # the devices with metadata parition
 BOARD_USES_METADATA_PARTITION := true
 
-BOARD_VNDK_VERSION := current
+# Default is current, but allow devices to override vndk version if needed.
+BOARD_VNDK_VERSION ?= current
 
 # Required flag for non-64 bit devices from P.
 TARGET_USES_64_BIT_BINDER := true
diff --git a/target/board/BoardConfigModuleCommon.mk b/target/board/BoardConfigModuleCommon.mk
new file mode 100644
index 0000000..24c01a5
--- /dev/null
+++ b/target/board/BoardConfigModuleCommon.mk
@@ -0,0 +1,6 @@
+# BoardConfigModuleCommon.mk
+#
+# Common compile-time settings for module builds.
+
+# Required for all module devices.
+TARGET_USES_64_BIT_BINDER := true
diff --git a/target/board/module_arm/BoardConfig.mk b/target/board/module_arm/BoardConfig.mk
index 565efc8..3f35c06 100644
--- a/target/board/module_arm/BoardConfig.mk
+++ b/target/board/module_arm/BoardConfig.mk
@@ -13,6 +13,8 @@
 # limitations under the License.
 #
 
+include build/make/target/board/BoardConfigModuleCommon.mk
+
 TARGET_ARCH := arm
 TARGET_ARCH_VARIANT := armv7-a-neon
 TARGET_CPU_VARIANT := generic
diff --git a/target/board/module_arm64/BoardConfig.mk b/target/board/module_arm64/BoardConfig.mk
index 66e3792..3700056 100644
--- a/target/board/module_arm64/BoardConfig.mk
+++ b/target/board/module_arm64/BoardConfig.mk
@@ -13,6 +13,8 @@
 # limitations under the License.
 #
 
+include build/make/target/board/BoardConfigModuleCommon.mk
+
 TARGET_ARCH := arm64
 TARGET_ARCH_VARIANT := armv8-a
 TARGET_CPU_VARIANT := generic
diff --git a/target/board/module_x86/BoardConfig.mk b/target/board/module_x86/BoardConfig.mk
index af3fffd..a93ac97 100644
--- a/target/board/module_x86/BoardConfig.mk
+++ b/target/board/module_x86/BoardConfig.mk
@@ -13,6 +13,8 @@
 # limitations under the License.
 #
 
+include build/make/target/board/BoardConfigModuleCommon.mk
+
 TARGET_CPU_ABI := x86
 TARGET_ARCH := x86
 TARGET_ARCH_VARIANT := x86
diff --git a/target/board/module_x86/device.mk b/target/board/module_x86/device.mk
deleted file mode 100644
index 7cac5f8..0000000
--- a/target/board/module_x86/device.mk
+++ /dev/null
@@ -1,18 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-$(call inherit-product, build/make/target/product/bootclasspath.mk)
-$(call inherit-product, build/make/target/product/languages_default.mk)
diff --git a/target/board/module_x86_64/BoardConfig.mk b/target/board/module_x86_64/BoardConfig.mk
index 1ada027..1ed3be0 100644
--- a/target/board/module_x86_64/BoardConfig.mk
+++ b/target/board/module_x86_64/BoardConfig.mk
@@ -13,6 +13,8 @@
 # limitations under the License.
 #
 
+include build/make/target/board/BoardConfigModuleCommon.mk
+
 TARGET_CPU_ABI := x86_64
 TARGET_ARCH := x86_64
 TARGET_ARCH_VARIANT := x86_64
diff --git a/target/board/module_x86_64/device.mk b/target/board/module_x86_64/device.mk
deleted file mode 100644
index 9065d4b..0000000
--- a/target/board/module_x86_64/device.mk
+++ /dev/null
@@ -1,19 +0,0 @@
-#
-# Copyright (C) 2020 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-$(call inherit-product, build/make/target/product/bootclasspath.mk)
-$(call inherit-product, build/make/target/product/core_64_bit.mk)
-$(call inherit-product, build/make/target/product/languages_default.mk)
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index f657fbf..7d9d90e 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -57,7 +57,6 @@
     $(LOCAL_DIR)/generic_system_x86_64.mk \
     $(LOCAL_DIR)/generic_system_x86_arm.mk \
     $(LOCAL_DIR)/generic_x86.mk \
-    $(LOCAL_DIR)/gsi_arm64.mk \
     $(LOCAL_DIR)/mainline_system_arm64.mk \
     $(LOCAL_DIR)/mainline_system_x86.mk \
     $(LOCAL_DIR)/mainline_system_x86_64.mk \
@@ -73,7 +72,12 @@
 
 endif
 
-PRODUCT_MAKEFILES += $(LOCAL_DIR)/mainline_sdk.mk
+PRODUCT_MAKEFILES += \
+    $(LOCAL_DIR)/mainline_sdk.mk \
+    $(LOCAL_DIR)/module_arm.mk \
+    $(LOCAL_DIR)/module_arm64.mk \
+    $(LOCAL_DIR)/module_x86.mk \
+    $(LOCAL_DIR)/module_x86_64.mk \
 
 COMMON_LUNCH_CHOICES := \
     aosp_arm64-eng \
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 2660d69..09864bc 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -214,6 +214,7 @@
     ndc \
     netd \
     NetworkStackNext \
+    odsign \
     org.apache.http.legacy \
     otacerts \
     PackageInstaller \
@@ -400,5 +401,8 @@
 PRODUCT_COPY_FILES += $(call add-to-product-copy-files-if-exists,\
     frameworks/base/config/dirty-image-objects:system/etc/dirty-image-objects)
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/bootclasspath.mk)
+# This property allows enabling Keystore 2.0 selectively for testing.
+# TODO Remove when Keystore 2.0 migration is complete. b/171563717
+PRODUCT_SYSTEM_PROPERTIES += persist.android.security.keystore2.enable=false
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/runtime_libart.mk)
diff --git a/target/product/bootclasspath.mk b/target/product/default_art_config.mk
similarity index 78%
rename from target/product/bootclasspath.mk
rename to target/product/default_art_config.mk
index 60dd071..1545780 100644
--- a/target/product/bootclasspath.mk
+++ b/target/product/default_art_config.mk
@@ -36,7 +36,8 @@
     com.android.permission:framework-permission \
     com.android.sdkext:framework-sdkextensions \
     com.android.wifi:framework-wifi \
-    com.android.tethering:framework-tethering
+    com.android.tethering:framework-tethering \
+    com.android.ipsec:android.net.ipsec.ike
 
 # Add the compatibility library that is needed when android.test.base
 # is removed from the bootclasspath.
@@ -47,3 +48,12 @@
 else
   PRODUCT_BOOT_JARS += android.test.base
 endif
+
+# Minimal configuration for running dex2oat (default argument values).
+# PRODUCT_USES_DEFAULT_ART_CONFIG must be true to enable boot image compilation.
+PRODUCT_USES_DEFAULT_ART_CONFIG := true
+PRODUCT_SYSTEM_PROPERTIES += \
+    dalvik.vm.image-dex2oat-Xms=64m \
+    dalvik.vm.image-dex2oat-Xmx=64m \
+    dalvik.vm.dex2oat-Xms=64m \
+    dalvik.vm.dex2oat-Xmx=512m \
diff --git a/target/product/generic.mk b/target/product/generic.mk
index d3f81b1..fb5b727 100644
--- a/target/product/generic.mk
+++ b/target/product/generic.mk
@@ -29,4 +29,10 @@
 PRODUCT_NAME := generic
 
 allowed_list := product_manifest.xml
+
+# TODO(b/182105280): When ART prebuilts are used in this product, Soong doesn't
+# produce any Android.mk entries for them. Exclude them until that problem is
+# fixed.
+allowed_list += com.android.art com.android.art.debug
+
 $(call enforce-product-packages-exist,$(allowed_list))
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index 9580ade..1f310c9 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -32,8 +32,6 @@
 PRODUCT_PACKAGES += \
     LiveWallpapersPicker \
     PartnerBookmarksProvider \
-    PresencePolling \
-    RcsService \
     Stk \
     Tag \
     TimeZoneUpdater \
diff --git a/target/product/gsi/Android.bp b/target/product/gsi/Android.bp
index b7ce86e..88472eb 100644
--- a/target/product/gsi/Android.bp
+++ b/target/product/gsi/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "build_make_license"
+    // to get the below license kinds:
+    //   legacy_restricted
+    default_applicable_licenses: ["build_make_license"],
+}
+
 filegroup {
     name: "vndk_lib_lists",
     srcs: [
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index b4df5fe..ecce01a 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -63,6 +63,8 @@
 # Script to update the latest VNDK lib list
 include $(CLEAR_VARS)
 LOCAL_MODULE := update-vndk-list.sh
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := EXECUTABLES
 LOCAL_MODULE_STEM := $(LOCAL_MODULE)
 LOCAL_IS_HOST_MODULE := true
@@ -146,6 +148,8 @@
 
 include $(CLEAR_VARS)
 LOCAL_MODULE := vndk_package
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 # Filter LLNDK libs moved to APEX to avoid pulling them into /system/LIB
 LOCAL_REQUIRED_MODULES := \
     $(filter-out $(LLNDK_MOVED_TO_APEX_LIBRARIES),$(LLNDK_LIBRARIES))
@@ -161,11 +165,16 @@
 include $(BUILD_PHONY_PACKAGE)
 
 include $(CLEAR_VARS)
-_vndk_versions := $(PRODUCT_EXTRA_VNDK_VERSIONS)
+_vndk_versions :=
+ifeq ($(filter com.android.vndk.current.on_vendor, $(PRODUCT_PACKAGES)),)
+	_vndk_versions += $(PRODUCT_EXTRA_VNDK_VERSIONS)
+endif
 ifneq ($(BOARD_VNDK_VERSION),current)
 	_vndk_versions += $(BOARD_VNDK_VERSION)
 endif
 LOCAL_MODULE := vndk_apex_snapshot_package
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_REQUIRED_MODULES := $(foreach vndk_ver,$(_vndk_versions),com.android.vndk.v$(vndk_ver))
 include $(BUILD_PHONY_PACKAGE)
 
@@ -178,6 +187,8 @@
 
 include $(CLEAR_VARS)
 LOCAL_MODULE := gsi_skip_mount.cfg
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_STEM := skip_mount.cfg
 LOCAL_SRC_FILES := $(LOCAL_MODULE)
 LOCAL_MODULE_CLASS := ETC
@@ -201,6 +212,8 @@
 
 include $(CLEAR_VARS)
 LOCAL_MODULE := init.gsi.rc
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_SRC_FILES := $(LOCAL_MODULE)
 LOCAL_MODULE_CLASS := ETC
 LOCAL_SYSTEM_EXT_MODULE := true
@@ -211,6 +224,8 @@
 
 include $(CLEAR_VARS)
 LOCAL_MODULE := init.vndk-nodef.rc
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_SRC_FILES := $(LOCAL_MODULE)
 LOCAL_MODULE_CLASS := ETC
 LOCAL_SYSTEM_EXT_MODULE := true
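
vndk_apex_snapshot_package now pulls in PRODUCT_EXTRA_VNDK_VERSIONS only when com.android.vndk.current.on_vendor is not in PRODUCT_PACKAGES, still appends BOARD_VNDK_VERSION when it is not "current", and maps each resulting version to a com.android.vndk.v<version> module. A Python sketch of that selection, with hypothetical inputs:

# Sketch of the version selection behind vndk_apex_snapshot_package.
# The inputs below are hypothetical example values.
def vndk_snapshot_modules(extra_versions, board_vndk_version, product_packages):
    versions = []
    if "com.android.vndk.current.on_vendor" not in product_packages:
        versions += extra_versions
    if board_vndk_version != "current":
        versions.append(board_vndk_version)
    return ["com.android.vndk.v%s" % v for v in versions]

print(vndk_snapshot_modules(["28", "29"], "current", []))
# ['com.android.vndk.v28', 'com.android.vndk.v29']
print(vndk_snapshot_modules(["28", "29"], "current",
                            ["com.android.vndk.current.on_vendor"]))
# [] - extra versions are skipped when the on_vendor package is present
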
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 4935a3d..c753e6c 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -57,6 +57,7 @@
 VNDK-SP: libutilscallstack.so
 VNDK-SP: libz.so
 VNDK-core: android.hardware.audio.common@2.0.so
+VNDK-core: android.hardware.authsecret-V1-ndk_platform.so
 VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk_platform.so
 VNDK-core: android.hardware.configstore-utils.so
 VNDK-core: android.hardware.configstore@1.0.so
@@ -67,6 +68,7 @@
 VNDK-core: android.hardware.graphics.allocator@4.0.so
 VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
 VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-core: android.hardware.health.storage-V1-ndk_platform.so
 VNDK-core: android.hardware.identity-V2-ndk_platform.so
 VNDK-core: android.hardware.keymaster-V2-ndk_platform.so
 VNDK-core: android.hardware.light-V1-ndk_platform.so
@@ -77,16 +79,19 @@
 VNDK-core: android.hardware.memtrack@1.0.so
 VNDK-core: android.hardware.oemlock-V1-ndk_platform.so
 VNDK-core: android.hardware.power-V1-ndk_platform.so
+VNDK-core: android.hardware.power.stats-V1-ndk_platform.so
 VNDK-core: android.hardware.rebootescrow-V1-ndk_platform.so
 VNDK-core: android.hardware.security.keymint-V1-ndk_platform.so
+VNDK-core: android.hardware.security.secureclock-V1-ndk_platform.so
+VNDK-core: android.hardware.security.sharedsecret-V1-ndk_platform.so
 VNDK-core: android.hardware.soundtrigger@2.0-core.so
 VNDK-core: android.hardware.soundtrigger@2.0.so
 VNDK-core: android.hardware.vibrator-V1-ndk_platform.so
+VNDK-core: android.hardware.weaver-V1-ndk_platform.so
 VNDK-core: android.hidl.token@1.0-utils.so
 VNDK-core: android.hidl.token@1.0.so
 VNDK-core: android.system.keystore2-V1-ndk_platform.so
 VNDK-core: android.system.suspend@1.0.so
-VNDK-core: libadf.so
 VNDK-core: libaudioroute.so
 VNDK-core: libaudioutils.so
 VNDK-core: libbinder.so
diff --git a/target/product/gsi_arm64.mk b/target/product/gsi_arm64.mk
deleted file mode 100644
index 1043a85..0000000
--- a/target/product/gsi_arm64.mk
+++ /dev/null
@@ -1,46 +0,0 @@
-#
-# Copyright (C) 2019 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#
-# All components inherited here go to system image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/generic_system.mk)
-
-# Enable mainline checking
-PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
-
-#
-# All components inherited here go to system_ext image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
-
-#
-# All components inherited here go to product image
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
-
-#
-# Special settings for GSI releasing
-#
-$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
-
-
-PRODUCT_NAME := gsi_arm64
-PRODUCT_DEVICE := gsi_arm64
-PRODUCT_BRAND := generic
-PRODUCT_MODEL := GSI on ARM64
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 241b6ba..25716ce 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -34,12 +34,12 @@
 # Split selinux policy
 PRODUCT_FULL_TREBLE_OVERRIDE := true
 
+# Enable dynamic partitions to facilitate mixing onto Cuttlefish
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
+
 # Enable dynamic partition size
 PRODUCT_USE_DYNAMIC_PARTITION_SIZE := true
 
-# Needed by Pi newly launched device to pass VtsTrebleSysProp on GSI
-PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE := true
-
 # GSI targets should install "unflattened" APEXes in /system
 TARGET_FLATTEN_APEX := false
 
diff --git a/target/product/media_system.mk b/target/product/media_system.mk
index 4ebec51..143131e 100644
--- a/target/product/media_system.mk
+++ b/target/product/media_system.mk
@@ -58,7 +58,6 @@
 # The values should be of the format <apex name>:<jar name>
 PRODUCT_UPDATABLE_SYSTEM_SERVER_JARS := \
     com.android.permission:service-permission \
-    com.android.ipsec:android.net.ipsec.ike \
 
 PRODUCT_COPY_FILES += \
     system/core/rootdir/etc/public.libraries.android.txt:system/etc/public.libraries.txt
diff --git a/target/board/module_arm/device.mk b/target/product/module_arm.mk
similarity index 73%
copy from target/board/module_arm/device.mk
copy to target/product/module_arm.mk
index 7cac5f8..d99dce8 100644
--- a/target/board/module_arm/device.mk
+++ b/target/product/module_arm.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,5 +14,8 @@
 # limitations under the License.
 #
 
-$(call inherit-product, build/make/target/product/bootclasspath.mk)
-$(call inherit-product, build/make/target/product/languages_default.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
+
+PRODUCT_NAME := module_arm
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := module_arm
diff --git a/target/board/module_arm64/device.mk b/target/product/module_arm64.mk
similarity index 67%
rename from target/board/module_arm64/device.mk
rename to target/product/module_arm64.mk
index 9065d4b..fc9529c 100644
--- a/target/board/module_arm64/device.mk
+++ b/target/product/module_arm64.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,6 +14,9 @@
 # limitations under the License.
 #
 
-$(call inherit-product, build/make/target/product/bootclasspath.mk)
-$(call inherit-product, build/make/target/product/core_64_bit.mk)
-$(call inherit-product, build/make/target/product/languages_default.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+
+PRODUCT_NAME := module_arm64
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := module_arm64
diff --git a/target/board/module_arm/device.mk b/target/product/module_common.mk
similarity index 74%
rename from target/board/module_arm/device.mk
rename to target/product/module_common.mk
index 7cac5f8..eedd479 100644
--- a/target/board/module_arm/device.mk
+++ b/target/product/module_common.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,5 +14,5 @@
 # limitations under the License.
 #
 
-$(call inherit-product, build/make/target/product/bootclasspath.mk)
-$(call inherit-product, build/make/target/product/languages_default.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/default_art_config.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
diff --git a/target/board/module_arm/device.mk b/target/product/module_x86.mk
similarity index 73%
copy from target/board/module_arm/device.mk
copy to target/product/module_x86.mk
index 7cac5f8..b852e7a 100644
--- a/target/board/module_arm/device.mk
+++ b/target/product/module_x86.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,5 +14,8 @@
 # limitations under the License.
 #
 
-$(call inherit-product, build/make/target/product/bootclasspath.mk)
-$(call inherit-product, build/make/target/product/languages_default.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
+
+PRODUCT_NAME := module_x86
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := module_x86
diff --git a/target/board/module_arm64/device.mk b/target/product/module_x86_64.mk
similarity index 66%
copy from target/board/module_arm64/device.mk
copy to target/product/module_x86_64.mk
index 9065d4b..f6bc1fc 100644
--- a/target/board/module_arm64/device.mk
+++ b/target/product/module_x86_64.mk
@@ -1,5 +1,5 @@
 #
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -14,6 +14,9 @@
 # limitations under the License.
 #
 
-$(call inherit-product, build/make/target/product/bootclasspath.mk)
-$(call inherit-product, build/make/target/product/core_64_bit.mk)
-$(call inherit-product, build/make/target/product/languages_default.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+
+PRODUCT_NAME := module_x86_64
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := module_x86_64
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index 687e9f6..b588c78 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -16,6 +16,8 @@
 
 # Provides a functioning ART environment without Android frameworks
 
+$(call inherit-product, $(SRC_TARGET_DIR)/product/default_art_config.mk)
+
 # Additional mixins to the boot classpath.
 PRODUCT_PACKAGES += \
     android.test.base \
@@ -28,10 +30,43 @@
 PRODUCT_PACKAGES += com.android.runtime
 
 # ART APEX module.
-# Note that this package includes the minimal boot classpath JARs (listed in
-# ART_APEX_JARS), which should no longer be added directly to PRODUCT_PACKAGES.
-PRODUCT_PACKAGES += com.android.art-autoselect
-PRODUCT_HOST_PACKAGES += com.android.art-autoselect
+#
+# Select either release (com.android.art) or debug (com.android.art.debug)
+# variant of the ART APEX. By default, "user" build variants contain the release
+# module, while the "eng" build variant contain the debug module. However, if
+# `PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD` is defined, it overrides the previous
+# logic:
+# - if `PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD` is set to `false`, the
+#   build will include the release module (whatever the build
+#   variant);
+# - if `PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD` is set to `true`, the
+#   build will include the debug module (whatever the build variant).
+#
+# Note that the ART APEX package includes the minimal boot classpath JARs
+# (listed in ART_APEX_JARS), which should no longer be added directly to
+# PRODUCT_PACKAGES.
+
+art_target_include_debug_build := $(PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD)
+ifneq (false,$(art_target_include_debug_build))
+  ifneq (,$(filter eng,$(TARGET_BUILD_VARIANT)))
+    art_target_include_debug_build := true
+  endif
+endif
+
+ifeq (true,$(art_target_include_debug_build))
+  PRODUCT_PACKAGES += com.android.art.debug
+  apex_test_module := art-check-debug-apex-gen-fakebin
+else
+  PRODUCT_PACKAGES += com.android.art
+  apex_test_module := art-check-release-apex-gen-fakebin
+endif
+
+ifeq (true,$(SOONG_CONFIG_art_module_source_build))
+  PRODUCT_HOST_PACKAGES += $(apex_test_module)
+endif
+
+art_target_include_debug_build :=
+apex_test_module :=
 
 # Certificates.
 PRODUCT_PACKAGES += \
@@ -41,10 +76,6 @@
     hiddenapi-package-whitelist.xml \
 
 PRODUCT_SYSTEM_PROPERTIES += \
-    dalvik.vm.image-dex2oat-Xms=64m \
-    dalvik.vm.image-dex2oat-Xmx=64m \
-    dalvik.vm.dex2oat-Xms=64m \
-    dalvik.vm.dex2oat-Xmx=512m \
     dalvik.vm.usejit=true \
     dalvik.vm.usejitprofiles=true \
     dalvik.vm.dexopt.secondary=true \
@@ -58,17 +89,18 @@
 ifeq (eng,$(TARGET_BUILD_VARIANT))
     PRODUCT_SYSTEM_PROPERTIES += \
         pm.dexopt.first-boot?=extract \
-        pm.dexopt.boot?=extract
+        pm.dexopt.boot-after-ota?=extract
 else
     PRODUCT_SYSTEM_PROPERTIES += \
-        pm.dexopt.first-boot?=quicken \
-        pm.dexopt.boot?=verify
+        pm.dexopt.first-boot?=verify \
+        pm.dexopt.boot-after-ota?=verify
 endif
 
 # The install filter is speed-profile in order to enable the use of
 # profiles from the dex metadata files. Note that if a profile is not provided
 # or if it is empty speed-profile is equivalent to (quicken + empty app image).
 PRODUCT_SYSTEM_PROPERTIES += \
+    pm.dexopt.post-boot?=extract \
     pm.dexopt.install?=speed-profile \
     pm.dexopt.install-fast?=skip \
     pm.dexopt.install-bulk?=speed-profile \
@@ -97,4 +129,14 @@
     dalvik.vm.minidebuginfo=true \
     dalvik.vm.dex2oat-minidebuginfo=true
 
-PRODUCT_USES_DEFAULT_ART_CONFIG := true
+# IORap is controlled by two other device configs in addition to "ro.iorapd.enable".
+# IORap is off by default and starts only when
+# (https://source.corp.google.com/android/system/iorap/iorapd.rc?q=iorapd.rc)
+#
+# * "ro.iorapd.enable" is explicitly set to true (leaving it unset is not enough), and
+# * one of the device configs is true.
+#
+# "ro.iorapd.enable" therefore has to be set to true here so that iorapd can be started.
+PRODUCT_SYSTEM_PROPERTIES += \
+    ro.iorapd.enable?=true
+
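
The ART APEX selection above reduces to: take the debug module on eng builds or when PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD is true, take the release module otherwise, and an explicit false always overrides the build variant. A minimal Python restatement of that decision, with hypothetical inputs:

# Sketch of the release/debug ART APEX choice made in runtime_libart.mk.
def art_apex(product_art_debug, target_build_variant):
    include_debug = product_art_debug
    if include_debug != "false" and target_build_variant == "eng":
        include_debug = "true"
    return "com.android.art.debug" if include_debug == "true" else "com.android.art"

print(art_apex("", "eng"))       # com.android.art.debug
print(art_apex("false", "eng"))  # com.android.art (explicit override wins)
print(art_apex("true", "user"))  # com.android.art.debug
print(art_apex("", "user"))      # com.android.art
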
diff --git a/target/product/security/Android.bp b/target/product/security/Android.bp
index 5f4f82b..98698c5 100644
--- a/target/product/security/Android.bp
+++ b/target/product/security/Android.bp
@@ -1,4 +1,13 @@
 // AOSP test certificate
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "build_make_license"
+    // to get the below license kinds:
+    //   legacy_restricted
+    default_applicable_licenses: ["build_make_license"],
+}
+
 android_app_certificate {
     name: "aosp-testkey",
     certificate: "testkey",
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index d6a8b53..83f0a4b 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -5,6 +5,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := verity_key
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_SRC_FILES := $(LOCAL_MODULE)
 LOCAL_MODULE_CLASS := ETC
 LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
@@ -24,6 +26,8 @@
 ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
   include $(CLEAR_VARS)
   LOCAL_MODULE := verity_key_ramdisk
+  LOCAL_LICENSE_KINDS := legacy_restricted
+  LOCAL_LICENSE_CONDITIONS := restricted
   LOCAL_MODULE_CLASS := ETC
   LOCAL_SRC_FILES := verity_key
   LOCAL_MODULE_STEM := verity_key
@@ -37,6 +41,8 @@
   ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
     include $(CLEAR_VARS)
     LOCAL_MODULE := adb_keys
+    LOCAL_LICENSE_KINDS := legacy_restricted
+    LOCAL_LICENSE_CONDITIONS := restricted
     LOCAL_MODULE_CLASS := ETC
     LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
     LOCAL_PREBUILT_MODULE_FILE := $(PRODUCT_ADB_KEYS)
@@ -51,6 +57,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := otacerts
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_MODULE_STEM := otacerts.zip
 LOCAL_MODULE_PATH := $(TARGET_OUT_ETC)/security
@@ -65,6 +73,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := otacerts.recovery
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_MODULE_STEM := otacerts.zip
 LOCAL_MODULE_PATH := $(TARGET_RECOVERY_ROOT_OUT)/system/etc/security
diff --git a/target/product/sysconfig/Android.bp b/target/product/sysconfig/Android.bp
index 5632d17..29122e4 100644
--- a/target/product/sysconfig/Android.bp
+++ b/target/product/sysconfig/Android.bp
@@ -12,6 +12,10 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
 prebuilt_etc {
     name: "preinstalled-packages-platform-aosp-product.xml",
     product_specific: true,
@@ -30,4 +34,4 @@
     product_specific: true,
     sub_dir: "sysconfig",
     src: "preinstalled-packages-platform-handheld-product.xml",
-}
\ No newline at end of file
+}
diff --git a/target/product/virtual_ab_ota/compression.mk b/target/product/virtual_ab_ota/compression.mk
index b9f3fc8..8301047 100644
--- a/target/product/virtual_ab_ota/compression.mk
+++ b/target/product/virtual_ab_ota/compression.mk
@@ -17,7 +17,7 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/launch_with_vendor_ramdisk.mk)
 
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
-
+PRODUCT_VIRTUAL_AB_COMPRESSION := true
 PRODUCT_PACKAGES += \
     snapuserd.vendor_ramdisk \
     snapuserd \
diff --git a/target/product/virtual_ab_ota/compression_retrofit.mk b/target/product/virtual_ab_ota/compression_retrofit.mk
index 5da8b54..6c29cba 100644
--- a/target/product/virtual_ab_ota/compression_retrofit.mk
+++ b/target/product/virtual_ab_ota/compression_retrofit.mk
@@ -15,6 +15,7 @@
 #
 
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
+PRODUCT_VIRTUAL_AB_COMPRESSION := true
 
 # For devices that are not GKI-capable (eg do not have vendor_boot),
 # snapuserd.ramdisk is included rather than snapuserd.vendor_ramdisk.
diff --git a/tools/Android.bp b/tools/Android.bp
index e0f3739..269e610 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -12,6 +12,19 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "build_make_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    //   SPDX-license-identifier-BSD
+    //   SPDX-license-identifier-CC-BY
+    //   SPDX-license-identifier-GPL
+    //   SPDX-license-identifier-MIT
+    default_applicable_licenses: ["build_make_license"],
+}
+
 python_binary_host {
   name: "generate-self-extracting-archive",
   srcs: ["generate-self-extracting-archive.py"],
@@ -62,10 +75,10 @@
   srcs: ["extract_kernel.py"],
   version: {
     py2: {
-      enabled: true,
+      enabled: false,
     },
     py3: {
-      enabled: false,
+      enabled: true,
     },
   },
 }
diff --git a/tools/acp/Android.bp b/tools/acp/Android.bp
index 64f5a10..78738b0 100644
--- a/tools/acp/Android.bp
+++ b/tools/acp/Android.bp
@@ -2,6 +2,15 @@
 //
 // Custom version of cp.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "build_make_license"
+    // to get the below license kinds:
+    //   legacy_restricted
+    default_applicable_licenses: ["build_make_license"],
+}
+
 cc_binary_host {
 
     srcs: ["acp.c"],
diff --git a/tools/apicheck/Android.bp b/tools/apicheck/Android.bp
index 8fe20e9..f58042f 100644
--- a/tools/apicheck/Android.bp
+++ b/tools/apicheck/Android.bp
@@ -12,6 +12,10 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
 java_binary_host {
     name: "apicheck",
     wrapper: "etc/apicheck",
diff --git a/tools/atree/Android.bp b/tools/atree/Android.bp
index 5fbe042..7906d8b 100644
--- a/tools/atree/Android.bp
+++ b/tools/atree/Android.bp
@@ -2,6 +2,15 @@
 //
 // Copies files into the directory structure described by a manifest
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "build_make_license"
+    // to get the below license kinds:
+    //   legacy_restricted
+    default_applicable_licenses: ["build_make_license"],
+}
+
 cc_binary_host {
     name: "atree",
     srcs: [
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 9bee115..f27ed8c 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -35,9 +35,6 @@
 if [ -n "$TARGET_CPU_ABI2" ] ; then
   echo "ro.product.cpu.abi2=$TARGET_CPU_ABI2"
 fi
-echo "ro.product.cpu.abilist=$TARGET_CPU_ABI_LIST"
-echo "ro.product.cpu.abilist32=$TARGET_CPU_ABI_LIST_32_BIT"
-echo "ro.product.cpu.abilist64=$TARGET_CPU_ABI_LIST_64_BIT"
 
 if [ -n "$PRODUCT_DEFAULT_LOCALE" ] ; then
   echo "ro.product.locale=$PRODUCT_DEFAULT_LOCALE"
diff --git a/tools/compare_builds.py b/tools/compare_builds.py
new file mode 100755
index 0000000..838a628
--- /dev/null
+++ b/tools/compare_builds.py
@@ -0,0 +1,661 @@
+#!/usr/bin/env -S python3 -u
+
+"""
+This script helps find various build behaviors that make builds less hermetic
+and repeatable. Depending on the flags, it runs a sequence of builds and looks
+for files that have changed or have been improperly regenerated, updating
+their timestamps incorrectly. It also looks for changes that the build has
+done to the source tree, and for files whose contents are dependent on the
+location of the out directory.
+
+This utility has two major modes, full and incremental. By default, this tool
+runs in full mode. To run in incremental mode, pass the --incremental flag.
+
+
+FULL MODE
+
+In full mode, this tool helps verify BUILD CORRECTNESS by examining its
+REPEATABILITY. In full mode, this tool runs two complete builds in different
+directories and compares the CONTENTS of the two directories. Lists of any
+files that are added, removed or changed are printed, sorted by the timestamp
+of that file, to aid finding which dependencies trigger the rebuilding of
+other files.
+
+
+INCREMENTAL MODE
+
+In incremental mode, this tool helps verify the SPEED of the build. It runs two
+builds and looks at the TIMESTAMPS of the generated files, and reports files
+that were changed by the second build. In theory, an incremental build with no
+source files touched should not have any generated targets changed. As in full
+builds, the file list is returned sorted by timestamp.
+
+
+OTHER CHECKS
+
+In both full and incremental mode, this tool looks at the timestamps of all
+source files in the tree, and reports on files that have been touched. In the
+output, these are labeled with the header "Source files touched after start of
+build."
+
+In addition, by default, this tool sets the OUT_DIR environment variable to
+something other than "out" in order to find build rules that are not respecting
+the OUT_DIR. If you see these, you should fix them, but if your build can not
+complete for some reason because of this, you can pass the --no-check-out-dir
+flag to suppress this check.
+
+
+OTHER FLAGS
+
+In full mode, the --detect-embedded-paths flag does the two builds in different
+directories, to help in finding rules that embed the out directory path into
+the targets.
+
+The --hide-build-output flag hides the output of successful builds, to make
+script output cleaner. The output of builds that fail is still shown.
+
+The --no-build flag is useful if you have already done a build and would
+just like to re-run the analysis.
+
+The --target flag lets you specify a build target other than the default
+full build (droid). You can pass "nothing" as in the example below, or a
+specific target, to reduce the scope of the checks performed.
+
+The --touch flag lets you specify a list of source files to touch between
+the builds, to examine the consequences of editing a particular file.
+
+
+EXAMPLE COMMANDLINES
+
+Please run build/make/tools/compare_builds.py --help for a full listing
+of the commandline flags. Here are a sampling of useful combinations.
+
+  1. Find files changed during an incremental build that doesn't build
+     any targets.
+
+       build/make/tools/compare_builds.py --incremental --target nothing
+
+     Long incremental build times, or consecutive builds that re-run build actions
+     are usually caused by files being touched as part of loading the makefiles.
+
+     The nothing build (m nothing) loads the make and blueprint files, generates
+     the dependency graph, but then doesn't actually build any targets. Checking
+     against this build is the fastest and easiest way to find files that are
+     modified while makefiles are read, for example with $(shell) invocations.
+
+  2. Find packaging targets that are different, ignoring intermediate files.
+
+       build/make/tools/compare_builds.py --subdirs --detect-embedded-paths
+
+     These flags will compare the final staging directories for partitions,
+     as well as the APKs, apexes, testcases, and the like (the full directory
+     list is in the DEFAULT_DIRS variable below). Since these are the files
+     that are ultimately released, it is more important that these files be
+     replicable, even if the intermediates that went into them are not (for
+     example, when debugging symbols are stripped).
+
+  3. Check that all targets are repeatable.
+
+       build/make/tools/compare_builds.py --detect-embedded-paths
+
+     This check will list all of the differences in built targets that it can
+     find. Be aware that the AOSP tree still has quite a few targets that
+     are flagged by this check, so OEM changes might be lost in that list.
+     That said, each file shown here is a potential blocker for a repeatable
+     build.
+
+  4. See what targets are rebuilt when a file is touched between builds.
+
+       build/make/tools/compare_builds.py --incremental \
+            --touch frameworks/base/core/java/android/app/Activity.java
+
+     This check simulates the common engineer workflow of touching a single
+     file and rebuilding the whole system. To see a restricted view, consider
+     also passing a --target option for a common use case. For example:
+
+       build/make/tools/compare_builds.py --incremental --target framework \
+            --touch frameworks/base/core/java/android/app/Activity.java
+"""
+
+import argparse
+import itertools
+import os
+import shutil
+import stat
+import subprocess
+import sys
+
+
+# Soong
+SOONG_UI = "build/soong/soong_ui.bash"
+
+
+# Which directories to use when --subdirs is supplied without explicit directories.
+DEFAULT_DIRS = (
+    "apex",
+    "data",
+    "product",
+    "ramdisk",
+    "recovery",
+    "root",
+    "system",
+    "system_ext",
+    "system_other",
+    "testcases",
+    "vendor",
+)
+
+
+# Files to skip for incremental timestamp checking
+BUILD_INTERNALS_PREFIX_SKIP = (
+    "soong/.glob/",
+    ".path/",
+)
+
+
+BUILD_INTERNALS_SUFFIX_SKIP = (
+    "/soong/soong_build_metrics.pb",
+    "/.installable_test_files",
+    "/files.db",
+    "/.blueprint.bootstrap",
+    "/build_number.txt",
+    "/build.ninja",
+    "/.out-dir",
+    "/build_fingerprint.txt",
+    "/build_thumbprint.txt",
+    "/.copied_headers_list",
+    "/.installable_files",
+)
+
+
+class DiffType(object):
+  def __init__(self, code, message):
+    self.code = code
+    self.message = message
+
+DIFF_NONE = DiffType("DIFF_NONE", "Files are the same")
+DIFF_MODE = DiffType("DIFF_MODE", "Stat mode bits differ")
+DIFF_SIZE = DiffType("DIFF_SIZE", "File size differs")
+DIFF_SYMLINK = DiffType("DIFF_SYMLINK", "Symlinks point to different locations")
+DIFF_CONTENTS = DiffType("DIFF_CONTENTS", "File contents differ")
+
+
+def main():
+  argparser = argparse.ArgumentParser(description="Diff build outputs from two builds.",
+                                      epilog="Run this command from the root of the tree."
+                                        + " Before running this command, the build environment"
+                                        + " must be set up, including sourcing build/envsetup.sh"
+                                        + " and running lunch.")
+  argparser.add_argument("--detect-embedded-paths", action="store_true",
+      help="Use unique out dirs to detect paths embedded in binaries.")
+  argparser.add_argument("--incremental", action="store_true",
+      help="Compare which files are touched in two consecutive builds without a clean in between.")
+  argparser.add_argument("--hide-build-output", action="store_true",
+      help="Don't print the build output for successful builds")
+  argparser.add_argument("--no-build", dest="run_build", action="store_false",
+      help="Don't build or clean, but do everything else.")
+  argparser.add_argument("--no-check-out-dir", dest="check_out_dir", action="store_false",
+      help="Don't check for rules not honoring movable out directories.")
+  argparser.add_argument("--subdirs", nargs="*",
+      help="Only scan these subdirs of $PRODUCT_OUT instead of the whole out directory."
+           + " The --subdirs argument with no listed directories will give a default list.")
+  argparser.add_argument("--target", default="droid",
+      help="Make target to run. The default is droid")
+  argparser.add_argument("--touch", nargs="+", default=[],
+      help="Files to touch between builds. Must pair with --incremental.")
+  args = argparser.parse_args(sys.argv[1:])
+
+  if args.detect_embedded_paths and args.incremental:
+    sys.stderr.write("Can't pass --detect-embedded-paths and --incremental together.\n")
+    sys.exit(1)
+  if args.detect_embedded_paths and not args.check_out_dir:
+    sys.stderr.write("Can't pass --detect-embedded-paths and --no-check-out-dir together.\n")
+    sys.exit(1)
+  if args.touch and not args.incremental:
+    sys.stderr.write("The --incremental flag is required if the --touch flag is passed.")
+    sys.exit(1)
+
+  AssertAtTop()
+  RequireEnvVar("TARGET_PRODUCT")
+  RequireEnvVar("TARGET_BUILD_VARIANT")
+
+  # Out dir file names:
+  #   - dir_prefix - The directory we'll put everything in (except for maybe the top level
+  #     out/ dir).
+  #   - *work_dir - The directory that we will build directly into. This is in dir_prefix
+  #     unless --no-check-out-dir is set.
+  #   - *out_dir - After building, if work_dir is different from out_dir, we move the out
+  #     directory to here so we can do the comparisons.
+  #   - timestamp_* - Files we touch so we know the various phases between the builds, so we
+  #     can compare timestamps of files.
+  if args.incremental:
+    dir_prefix = "out_incremental"
+    if args.check_out_dir:
+      first_work_dir = first_out_dir = dir_prefix + "/out"
+      second_work_dir = second_out_dir = dir_prefix + "/out"
+    else:
+      first_work_dir = first_out_dir = "out"
+      second_work_dir = second_out_dir = "out"
+  else:
+    dir_prefix = "out_full"
+    first_out_dir = dir_prefix + "/out_1"
+    second_out_dir = dir_prefix + "/out_2"
+    if not args.check_out_dir:
+      first_work_dir = second_work_dir = "out"
+    elif args.detect_embedded_paths:
+      first_work_dir = first_out_dir
+      second_work_dir = second_out_dir
+    else:
+      first_work_dir = dir_prefix + "/work"
+      second_work_dir = dir_prefix + "/work"
+  timestamp_start = dir_prefix + "/timestamp_start"
+  timestamp_between = dir_prefix + "/timestamp_between"
+  timestamp_end = dir_prefix + "/timestamp_end"
+
+  if args.run_build:
+    # Initial clean, if necessary
+    print("Cleaning " + dir_prefix + "/")
+    Clean(dir_prefix)
+    print("Cleaning out/")
+    Clean("out")
+    CreateEmptyFile(timestamp_start)
+    print("Running the first build in " + first_work_dir)
+    RunBuild(first_work_dir, first_out_dir, args.target, args.hide_build_output)
+    for f in args.touch:
+      print("Touching " + f)
+      TouchFile(f)
+    CreateEmptyFile(timestamp_between)
+    print("Running the second build in " + second_work_dir)
+    RunBuild(second_work_dir, second_out_dir, args.target, args.hide_build_output)
+    CreateEmptyFile(timestamp_end)
+    print("Done building")
+    print()
+
+  # Which out directories to scan
+  if args.subdirs is not None:
+    if args.subdirs:
+      subdirs = args.subdirs
+    else:
+      subdirs = DEFAULT_DIRS
+    first_files = ProductFiles(RequireBuildVar(first_out_dir, "PRODUCT_OUT"), subdirs)
+    second_files = ProductFiles(RequireBuildVar(second_out_dir, "PRODUCT_OUT"), subdirs)
+  else:
+    first_files = OutFiles(first_out_dir)
+    second_files = OutFiles(second_out_dir)
+
+  printer = Printer()
+
+  if args.incremental:
+    # Find files that were rebuilt unnecessarily
+    touched_incrementally = FindOutFilesTouchedAfter(first_files,
+                                                     GetFileTimestamp(timestamp_between))
+    printer.PrintList("Touched in incremental build", touched_incrementally)
+  else:
+    # Compare the two out dirs
+    added, removed, changed = DiffFileList(first_files, second_files)
+    printer.PrintList("Added", added)
+    printer.PrintList("Removed", removed)
+    printer.PrintList("Changed", changed, "%s %s")
+
+  # Find files in the source tree that were touched
+  touched_during = FindSourceFilesTouchedAfter(GetFileTimestamp(timestamp_start))
+  printer.PrintList("Source files touched after start of build", touched_during)
+
+  # Find files and dirs that were output to "out" and didn't respect $OUT_DIR
+  if args.check_out_dir:
+    bad_out_dir_contents = FindFilesAndDirectories("out")
+    printer.PrintList("Files and directories created by rules that didn't respect $OUT_DIR",
+                      bad_out_dir_contents)
+
+  # If we didn't find anything, print success message
+  if not printer.printed_anything:
+    print("No bad behaviors found.")
+
+
+def AssertAtTop():
+  """If the current directory is not the top of an android source tree, print an error
+     message and exit."""
+  if not os.access(SOONG_UI, os.X_OK):
+    sys.stderr.write("FAILED: Please run from the root of the tree.\n")
+    sys.exit(1)
+
+
+def RequireEnvVar(name):
+  """Gets an environment variable. If that fails, then print an error message and exit."""
+  result = os.environ.get(name)
+  if not result:
+    sys.stderr.write("error: Can't determine %s. Please run lunch first.\n" % name)
+    sys.exit(1)
+  return result
+
+
+def RunSoong(out_dir, args, capture_output):
+  env = dict(os.environ)
+  env["OUT_DIR"] = out_dir
+  args = [SOONG_UI,] + args
+  if capture_output:
+    proc = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    combined_output, _ = proc.communicate()
+    return proc.returncode, combined_output
+  else:
+    result = subprocess.run(args, env=env)
+    return result.returncode, None
+
+
+def GetBuildVar(out_dir, name):
+  """Gets a variable from the build system."""
+  returncode, output = RunSoong(out_dir, ["--dumpvar-mode", name], True)
+  if returncode != 0:
+    return None
+  else:
+    return output.decode("utf-8").strip()
+
+
+def RequireBuildVar(out_dir, name):
+  """Gets a variable from the builds system. If that fails, then print an error
+     message and exit."""
+  value = GetBuildVar(out_dir, name)
+  if not value:
+    sys.stderr.write("error: Can't determine %s. Please run lunch first.\n" % name)
+    sys.exit(1)
+  return value
+
+
+def Clean(directory):
+  """"Deletes the supplied directory."""
+  try:
+    shutil.rmtree(directory)
+  except FileNotFoundError:
+    pass
+
+
+def RunBuild(work_dir, out_dir, target, hide_build_output):
+  """Runs a build. If the build fails, prints a message and exits."""
+  returncode, output = RunSoong(work_dir,
+                    ["--build-mode", "--all-modules", "--dir=" + os.getcwd(), target],
+                    hide_build_output)
+  if work_dir != out_dir:
+    os.replace(work_dir, out_dir)
+  if returncode != 0:
+    if hide_build_output:
+      # The build output was hidden, so print it now for debugging
+      sys.stderr.buffer.write(output)
+    sys.stderr.write("FAILED: Build failed. Stopping.\n")
+    sys.exit(1)
+
+
+def DiffFileList(first_files, second_files):
+  """Examines the files.
+
+  Returns:
+    Filenames of files in first_filelist but not second_filelist (added files)
+    Filenames of files in second_filelist but not first_filelist (removed files)
+    2-Tuple of filenames for the files that are in both but are different (changed files)
+  """
+  # List of files, relative to their respective PRODUCT_OUT directories
+  first_filelist = sorted([x for x in first_files], key=lambda x: x[1])
+  second_filelist = sorted([x for x in second_files], key=lambda x: x[1])
+
+  added = []
+  removed = []
+  changed = []
+
+  first_index = 0
+  second_index = 0
+
+  while first_index < len(first_filelist) and second_index < len(second_filelist):
+    # Path relative to source root and path relative to PRODUCT_OUT
+    first_full_filename, first_relative_filename = first_filelist[first_index]
+    second_full_filename, second_relative_filename = second_filelist[second_index]
+
+    if first_relative_filename < second_relative_filename:
+      # Removed
+      removed.append(first_full_filename)
+      first_index += 1
+    elif first_relative_filename > second_relative_filename:
+      # Added
+      added.append(second_full_filename)
+      second_index += 1
+    else:
+      # Both present
+      diff_type = DiffFiles(first_full_filename, second_full_filename)
+      if diff_type != DIFF_NONE:
+        changed.append((first_full_filename, second_full_filename))
+      first_index += 1
+      second_index += 1
+
+  while first_index < len(first_filelist):
+    first_full_filename, first_relative_filename = first_filelist[first_index]
+    removed.append(first_full_filename)
+    first_index += 1
+
+  while second_index < len(second_filelist):
+    second_full_filename, second_relative_filename = second_filelist[second_index]
+    added.append(second_full_filename)
+    second_index += 1
+
+  return (SortByTimestamp(added),
+          SortByTimestamp(removed),
+          SortByTimestamp(changed, key=lambda item: item[1]))
+
+
+def FindOutFilesTouchedAfter(files, timestamp):
+  """Find files in the given file iterator that were touched after timestamp."""
+  result = []
+  for full, relative in files:
+    ts = GetFileTimestamp(full)
+    if ts > timestamp:
+      result.append(TouchedFile(full, ts))
+  return [f.filename for f in sorted(result, key=lambda f: f.timestamp)]
+
+
+def GetFileTimestamp(filename):
+  """Get timestamp for a file (just wraps stat)."""
+  st = os.stat(filename, follow_symlinks=False)
+  return st.st_mtime
+
+
+def SortByTimestamp(items, key=lambda item: item):
+  """Sort the list by timestamp of files.
+  Args:
+    items - the list of items to sort
+    key - a function to extract a filename from each element in items
+  """
+  return [x[0] for x in sorted([(item, GetFileTimestamp(key(item))) for item in items],
+                               key=lambda y: y[1])]
+
+
+def FindSourceFilesTouchedAfter(timestamp):
+  """Find files in the source tree that have changed after timestamp. Ignores
+  the out directory."""
+  result = []
+  for root, dirs, files in os.walk(".", followlinks=False):
+    if root == ".":
+      RemoveItemsFromList(dirs, (".repo", "out", "out_full", "out_incremental"))
+    for f in files:
+      full = os.path.sep.join((root, f))[2:]
+      ts = GetFileTimestamp(full)
+      if ts > timestamp:
+        result.append(TouchedFile(full, ts))
+  return [f.filename for f in sorted(result, key=lambda f: f.timestamp)]
+
+
+def FindFilesAndDirectories(directory):
+  """Finds all files and directories inside a directory."""
+  result = []
+  for root, dirs, files in os.walk(directory, followlinks=False):
+    result += [os.path.sep.join((root, x, "")) for x in dirs]
+    result += [os.path.sep.join((root, x)) for x in files]
+  return result
+
+
+def CreateEmptyFile(filename):
+  """Create an empty file with now as the timestamp at filename."""
+  try:
+    os.makedirs(os.path.dirname(filename))
+  except FileExistsError:
+    pass
+  open(filename, "w").close()
+  os.utime(filename)
+
+
+def TouchFile(filename):
+  os.utime(filename)
+
+
+def DiffFiles(first_filename, second_filename):
+  def AreFileContentsSame(remaining, first_filename, second_filename):
+    """Compare the file contents. They must be known to be the same size."""
+    CHUNK_SIZE = 32*1024
+    with open(first_filename, "rb") as first_file:
+      with open(second_filename, "rb") as second_file:
+        while remaining > 0:
+          size = min(CHUNK_SIZE, remaining)
+          if first_file.read(size) != second_file.read(size):
+            return False
+          remaining -= size
+        return True
+
+  first_stat = os.stat(first_filename, follow_symlinks=False)
+  second_stat = os.stat(second_filename, follow_symlinks=False)
+
+  # Mode bits
+  if first_stat.st_mode != second_stat.st_mode:
+    return DIFF_MODE
+
+  # File size
+  if first_stat.st_size != second_stat.st_size:
+    return DIFF_SIZE
+
+  # Contents
+  if stat.S_ISLNK(first_stat.st_mode):
+    if os.readlink(first_filename) != os.readlink(second_filename):
+      return DIFF_SYMLINK
+  elif stat.S_ISREG(first_stat.st_mode):
+    if not AreFileContentsSame(first_stat.st_size, first_filename, second_filename):
+      return DIFF_CONTENTS
+
+  return DIFF_NONE
+
+
+class FileIterator(object):
+  """Object that produces an iterator containing all files in a given directory.
+
+  Each iteration yields a tuple containing:
+
+  [0] (full) Path to file relative to source tree.
+  [1] (relative) Path to the file relative to the base directory given in the
+      constructor.
+  """
+
+  def __init__(self, base_dir):
+    self._base_dir = base_dir
+
+  def __iter__(self):
+    return self._Iterator(self, self._base_dir)
+
+  def ShouldIncludeFile(self, root, path):
+    return False
+
+  class _Iterator(object):
+    def __init__(self, parent, base_dir):
+      self._parent = parent
+      self._base_dir = base_dir
+      self._walker = os.walk(base_dir, followlinks=False)
+      self._current_index = 0
+      self._current_dir = []
+
+    def __iter__(self):
+      return self
+
+    def __next__(self):
+      # os.walk's iterator will eventually terminate by raising StopIteration
+      while True:
+        if self._current_index >= len(self._current_dir):
+          root, dirs, files = self._walker.__next__()
+          full_paths = [os.path.sep.join((root, f)) for f in files]
+          pairs = [(f, f[len(self._base_dir)+1:]) for f in full_paths]
+          self._current_dir = [(full, relative) for full, relative in pairs
+                               if self._parent.ShouldIncludeFile(root, relative)]
+          self._current_index = 0
+          if not self._current_dir:
+            continue
+        index = self._current_index
+        self._current_index += 1
+        return self._current_dir[index]
+
+
+class OutFiles(FileIterator):
+  """Object that produces an iterator containing all files in a given out directory,
+  except for files which are known to be touched as part of build setup.
+  """
+  def __init__(self, out_dir):
+    super().__init__(out_dir)
+    self._out_dir = out_dir
+
+  def ShouldIncludeFile(self, root, relative):
+    # Skip files at the top level of the out dir, although note that this can
+    # also skip files that are unfortunately generated directly into that directory.
+    if root == self._out_dir:
+      return False
+    # Skiplist
+    for skip in BUILD_INTERNALS_PREFIX_SKIP:
+      if relative.startswith(skip):
+        return False
+    for skip in BUILD_INTERNALS_SUFFIX_SKIP:
+      if relative.endswith(skip):
+        return False
+    return True
+
+
+class ProductFiles(FileIterator):
+  """Object that produces an iterator containing files in listed subdirectories of $PRODUCT_OUT.
+  """
+  def __init__(self, product_out, subdirs):
+    super().__init__(product_out)
+    self._subdirs = subdirs
+
+  def ShouldIncludeFile(self, root, relative):
+    for subdir in self._subdirs:
+      if relative.startswith(subdir):
+        return True
+    return False
+
+
+class TouchedFile(object):
+  """A file in the out directory with a timestamp."""
+  def __init__(self, filename, timestamp):
+    self.filename = filename
+    self.timestamp = timestamp
+
+
+def RemoveItemsFromList(haystack, needles):
+  for needle in needles:
+    try:
+      haystack.remove(needle)
+    except ValueError:
+      pass
+
+
+class Printer(object):
+  def __init__(self):
+    self.printed_anything = False
+
+  def PrintList(self, title, items, fmt="%s"):
+    if items:
+      if self.printed_anything:
+        sys.stdout.write("\n")
+      sys.stdout.write("%s:\n" % title)
+      for item in items:
+        sys.stdout.write("  %s\n" % fmt % item)
+      self.printed_anything = True
+
+
+if __name__ == "__main__":
+  try:
+    main()
+  except KeyboardInterrupt:
+    pass
+
+
+# vim: ts=2 sw=2 sts=2 nocindent
diff --git a/tools/droiddoc/Android.bp b/tools/droiddoc/Android.bp
index 0428068..efd30c1 100644
--- a/tools/droiddoc/Android.bp
+++ b/tools/droiddoc/Android.bp
@@ -12,6 +12,19 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "build_make_license"
+    // to get the below license kinds:
+    //   SPDX-license-identifier-Apache-2.0
+    //   SPDX-license-identifier-BSD
+    //   SPDX-license-identifier-CC-BY
+    //   SPDX-license-identifier-GPL
+    //   SPDX-license-identifier-MIT
+    default_applicable_licenses: ["build_make_license"],
+}
+
 droiddoc_exported_dir {
     name: "droiddoc-templates-pdk",
     path: "templates-pdk",
diff --git a/tools/exercise_compare_builds b/tools/exercise_compare_builds
new file mode 100755
index 0000000..38e8405
--- /dev/null
+++ b/tools/exercise_compare_builds
@@ -0,0 +1,66 @@
+#!/bin/bash
+# Tests for compare_builds.py
+# usage (from root of source tree):
+#   build/make/tools/exercise_compare_builds
+
+HIDE_BUILD_OUTPUT=--hide-build-output
+
+function run()
+{
+    echo
+    echo
+    echo ============================================================
+    echo "$1"
+    shift
+    echo ./build/make/tools/compare_builds.py $HIDE_BUILD_OUTPUT --target incidentd $@
+    echo ============================================================
+    time ./build/make/tools/compare_builds.py $HIDE_BUILD_OUTPUT --target incidentd $@
+}
+
+function run_tests()
+{
+    # These should error out
+
+    run "Incremental build,  Separate work dirs  (invalid flag combo, should error out)" \
+        --incremental --detect-embedded-paths
+    run "Use out/ as work dir, Separate work dirs  (invalid flag combo, should error out)" \
+        --no-check-out-dir --detect-embedded-paths
+
+    # Each grouping starts with a build, and the following ones use --no-build to save time
+
+    run "REBUILD: Full builds,  Same work dir,  Whole out dir"
+    run "Full builds,  Same work dir,  Default subdirs" \
+        --no-build --subdirs
+    run "Full builds,  Same work dir,  Only $PRODUCT_OUT/system" \
+        --no-build --subdirs system
+
+    run "REBUILD: Full builds,  Use out/ as work dir,  Whole out dir" \
+        --no-check-out-dir
+    run "Full builds,  Use out/ as work dir,  Default subdirs" \
+        --no-build --no-check-out-dir --subdirs
+    run "Full builds,  Use out/ as work dir,  Only $PRODUCT_OUT/system" \
+        --no-build --no-check-out-dir --subdirs system
+
+    run "REBUILD: Full builds,  Separate work dirs,  Whole out dir" \
+        --detect-embedded-paths
+    run "Full builds,  Separate work dirs,  Default subdirs" \
+        --no-build --detect-embedded-paths --subdirs
+    run "Full builds,  Separate work dirs,  Only $PRODUCT_OUT/system" \
+        --no-build --detect-embedded-paths --subdirs system
+
+    run "REBUILD: Incremental build,  Same work dir,  Whole out dir" \
+        --incremental
+    run "Incremental build,  Same work dir,  Default subdirs" \
+        --no-build --incremental --subdirs
+    run "Incremental build,  Same work dir,  Only $PRODUCT_OUT/system" \
+        --no-build --incremental --subdirs system
+
+    run "REBUILD: Incremental build,  Use out/ as work dir,  Whole out dir" \
+        --incremental --no-check-out-dir
+    run "Incremental build,  Use out/ as work dir,  Default subdirs" \
+        --no-build --incremental --no-check-out-dir --subdirs
+    run "Incremental build,  Use out/ as work dir,  Only $PRODUCT_OUT/system" \
+        --no-build --incremental --no-check-out-dir --subdirs system
+}
+
+time run_tests 2>&1 | tee exercise_compare_builds.txt
diff --git a/tools/extract_kernel.py b/tools/extract_kernel.py
index 0046b38..44fbcdf 100755
--- a/tools/extract_kernel.py
+++ b/tools/extract_kernel.py
@@ -39,12 +39,12 @@
 # "Linux version " UTS_RELEASE " (" LINUX_COMPILE_BY "@"
 # LINUX_COMPILE_HOST ") (" LINUX_COMPILER ") " UTS_VERSION "\n";
 LINUX_BANNER_PREFIX = b'Linux version '
-LINUX_BANNER_REGEX = LINUX_BANNER_PREFIX + \
+LINUX_BANNER_REGEX = LINUX_BANNER_PREFIX.decode() + \
     r'(?P<release>(?P<version>[0-9]+[.][0-9]+[.][0-9]+).*) \(.*@.*\) \((?P<compiler>.*)\) .*\n'
 
 
 def get_from_release(input_bytes, start_idx, key):
-  null_idx = input_bytes.find('\x00', start_idx)
+  null_idx = input_bytes.find(b'\x00', start_idx)
   if null_idx < 0:
     return None
   try:
@@ -69,7 +69,7 @@
 
     value = get_from_release(input_bytes, idx, key)
     if value:
-      return value
+      return value.encode()
 
     idx += len(LINUX_BANNER_PREFIX)
 
@@ -140,7 +140,7 @@
   while True:
     idx = input_bytes.find(search_bytes, idx)
     if idx < 0:
-      raise StopIteration()
+      return
 
     yield try_decompress_bytes(cmd, input_bytes[idx:])
     idx += 1
@@ -183,6 +183,11 @@
       return False
   return True
 
+def to_bytes_io(b):
+  """
+  Make b, which is either sys.stdout or sys.stdin, receive bytes as arguments.
+  """
+  return b.buffer if sys.version_info.major == 3 else b
 
 def main():
   parser = argparse.ArgumentParser(
@@ -194,35 +199,35 @@
                       help='Input kernel image. If not specified, use stdin',
                       metavar='FILE',
                       type=argparse.FileType('rb'),
-                      default=sys.stdin)
+                      default=to_bytes_io(sys.stdin))
   parser.add_argument('--output-configs',
                       help='If specified, write configs. Use stdout if no file '
                            'is specified.',
                       metavar='FILE',
                       nargs='?',
                       type=argparse.FileType('wb'),
-                      const=sys.stdout)
+                      const=to_bytes_io(sys.stdout))
   parser.add_argument('--output-version',
                       help='If specified, write version. Use stdout if no file '
                            'is specified.',
                       metavar='FILE',
                       nargs='?',
                       type=argparse.FileType('wb'),
-                      const=sys.stdout)
+                      const=to_bytes_io(sys.stdout))
   parser.add_argument('--output-release',
                       help='If specified, write kernel release. Use stdout if '
                            'no file is specified.',
                       metavar='FILE',
                       nargs='?',
                       type=argparse.FileType('wb'),
-                      const=sys.stdout)
+                      const=to_bytes_io(sys.stdout))
   parser.add_argument('--output-compiler',
                       help='If specified, write the compiler information. Use stdout if no file '
                            'is specified.',
                       metavar='FILE',
                       nargs='?',
                       type=argparse.FileType('wb'),
-                      const=sys.stdout)
+                      const=to_bytes_io(sys.stdout))
   parser.add_argument('--tools',
                       help='Decompression tools to use. If not specified, PATH '
                            'is searched.',
diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp
index 1dd5e4a..4544e07 100644
--- a/tools/fs_config/Android.bp
+++ b/tools/fs_config/Android.bp
@@ -12,6 +12,15 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "build_make_license"
+    // to get the below license kinds:
+    //   legacy_restricted
+    default_applicable_licenses: ["build_make_license"],
+}
+
 bootstrap_go_package {
     name: "soong-fs_config",
     pkgPath: "android/soong/fs_config",
diff --git a/tools/fs_config/Android.mk b/tools/fs_config/Android.mk
index c338462..10d25e0 100644
--- a/tools/fs_config/Android.mk
+++ b/tools/fs_config/Android.mk
@@ -42,6 +42,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_dirs
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_REQUIRED_MODULES := \
   fs_config_dirs_system \
   fs_config_dirs_system_ext \
@@ -55,6 +57,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_files
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_REQUIRED_MODULES := \
   fs_config_files_system \
   fs_config_files_system_ext \
@@ -69,6 +73,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_dirs_system_ext
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),_fs_config_dirs_system_ext)
 include $(BUILD_PHONY_PACKAGE)
 
@@ -79,6 +85,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_files_system_ext
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_SYSTEM_EXTIMAGE)$(BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE),_fs_config_files_system_ext)
 include $(BUILD_PHONY_PACKAGE)
 
@@ -89,6 +97,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_dirs_product
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),_fs_config_dirs_product)
 include $(BUILD_PHONY_PACKAGE)
 
@@ -99,6 +109,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_files_product
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_REQUIRED_MODULES := $(if $(BOARD_USES_PRODUCTIMAGE)$(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE),_fs_config_files_product)
 include $(BUILD_PHONY_PACKAGE)
 
@@ -109,6 +121,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_dirs_nonsystem
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),_fs_config_dirs_$(t))
 include $(BUILD_PHONY_PACKAGE)
 
@@ -119,6 +133,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_files_nonsystem
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_REQUIRED_MODULES := $(foreach t,$(fs_config_generate_extra_partition_list),_fs_config_files_$(t))
 include $(BUILD_PHONY_PACKAGE)
 
@@ -129,6 +145,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_dirs_system
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 include $(BUILD_SYSTEM)/base_rules.mk
@@ -154,6 +172,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := fs_config_files_system
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 include $(BUILD_SYSTEM)/base_rules.mk
@@ -180,6 +200,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_vendor
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
@@ -204,6 +226,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_vendor
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR)/etc
@@ -231,6 +255,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_oem
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
@@ -255,6 +281,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_oem
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_OEM)/etc
@@ -282,6 +310,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_odm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
@@ -306,6 +336,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_odm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM)/etc
@@ -333,6 +365,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_vendor_dlkm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
@@ -357,6 +391,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_vendor_dlkm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_VENDOR_DLKM)/etc
@@ -384,6 +420,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_odm_dlkm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
@@ -408,6 +446,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_odm_dlkm
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_ODM_DLKM)/etc
@@ -435,6 +475,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_product
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
@@ -459,6 +501,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_product
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_PRODUCT)/etc
@@ -485,6 +529,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_dirs_system_ext
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_dirs
 LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_EXT)/etc
@@ -509,6 +555,8 @@
 include $(CLEAR_VARS)
 
 LOCAL_MODULE := _fs_config_files_system_ext
+LOCAL_LICENSE_KINDS := legacy_restricted
+LOCAL_LICENSE_CONDITIONS := restricted
 LOCAL_MODULE_CLASS := ETC
 LOCAL_INSTALLED_MODULE_STEM := fs_config_files
 LOCAL_MODULE_PATH := $(TARGET_OUT_SYSTEM_EXT)/etc
diff --git a/tools/fs_get_stats/Android.bp b/tools/fs_get_stats/Android.bp
index 67742b8..9457de4 100644
--- a/tools/fs_get_stats/Android.bp
+++ b/tools/fs_get_stats/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "build_make_license"
+    // to get the below license kinds:
+    //   legacy_restricted
+    default_applicable_licenses: ["build_make_license"],
+}
+
 cc_binary_host {
     name: "fs_get_stats",
     srcs: ["fs_get_stats.c"],
diff --git a/tools/libhost/Android.bp b/tools/libhost/Android.bp
index 4c9100f..a83f2e7 100644
--- a/tools/libhost/Android.bp
+++ b/tools/libhost/Android.bp
@@ -1,3 +1,12 @@
+package {
+    // See: http://go/android-license-faq
+    // A large-scale-change added 'default_applicable_licenses' to import
+    // all of the 'license_kinds' from "build_make_license"
+    // to get the below license kinds:
+    //   legacy_restricted
+    default_applicable_licenses: ["build_make_license"],
+}
+
 cc_library_host_static {
 
     srcs: ["CopyFile.c"],
diff --git a/tools/product_config/Android.bp b/tools/product_config/Android.bp
new file mode 100644
index 0000000..5fdbcf0
--- /dev/null
+++ b/tools/product_config/Android.bp
@@ -0,0 +1,27 @@
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+java_defaults {
+    name: "product-config-defaults",
+    srcs: ["src/**/*.java"],
+}
+
+java_binary_host {
+    name: "product-config",
+    defaults: ["product-config-defaults"],
+    manifest: "MANIFEST.MF"
+}
+
+java_test_host {
+    name: "product-config-test",
+    defaults: ["product-config-defaults"],
+    srcs: [
+        "test/**/*.java",
+    ],
+    static_libs: [
+        "junit"
+    ],
+    manifest: "TEST_MANIFEST.MF",
+    test_suites: ["general-tests"]
+}
diff --git a/tools/product_config/MANIFEST.MF b/tools/product_config/MANIFEST.MF
new file mode 100644
index 0000000..db88df3
--- /dev/null
+++ b/tools/product_config/MANIFEST.MF
@@ -0,0 +1,2 @@
+Manifest-Version: 1.0
+Main-Class: com.android.build.config.Main
diff --git a/tools/product_config/TEST_MANIFEST.MF b/tools/product_config/TEST_MANIFEST.MF
new file mode 100644
index 0000000..287a77f
--- /dev/null
+++ b/tools/product_config/TEST_MANIFEST.MF
@@ -0,0 +1,2 @@
+Manifest-Version: 1.0
+Main-Class: com.android.build.config.TestRunner
diff --git a/tools/product_config/TEST_MAPPING b/tools/product_config/TEST_MAPPING
new file mode 100644
index 0000000..d3568f1
--- /dev/null
+++ b/tools/product_config/TEST_MAPPING
@@ -0,0 +1,7 @@
+{
+  "presubmit": [
+    {
+      "name": "product_config_test"
+    }
+  ]
+}
diff --git a/tools/product_config/inherit_tree.py b/tools/product_config/inherit_tree.py
new file mode 100755
index 0000000..ae8a275
--- /dev/null
+++ b/tools/product_config/inherit_tree.py
@@ -0,0 +1,49 @@
+#!/usr/bin/env python3
+
+#
+# Run from the root of the tree, after product-config has been run to see
+# the product inheritance hierarchy for the current lunch target.
+#
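+# For example (illustrative, assuming an aosp_arm-eng lunch target):
+#   ./build/make/tools/product_config/inherit_tree.py out/aosp_arm-eng/dumpconfig.csv
+#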
+
+import csv
+import sys
+
+def PrintNodes(graph, node, prefix):
+  sys.stdout.write("%s%s" % (prefix, node))
+  children = graph.get(node, [])
+  if children:
+    sys.stdout.write(" {\n")
+    for child in sorted(children):
+      PrintNodes(graph, child, prefix + "  ")
+    sys.stdout.write("%s}\n" % prefix)
+  else:
+    sys.stdout.write("\n")
+
+def main(argv):
+  if len(argv) != 2:
+    print("usage: inherit_tree.py out/$TARGET_PRODUCT-$TARGET_BUILD_VARIANT/dumpconfig.csv")
+    sys.exit(1)
+
+  root = None
+  graph = {}
+  with open(argv[1], newline='') as csvfile:
+    for line in csv.reader(csvfile):
+      if not root:
+        # Look for PRODUCTS
+        if len(line) < 3 or line[0] != "phase" or line[1] != "PRODUCTS":
+          continue
+        root = line[2]
+      else:
+        # Everything else
+        if len(line) < 3 or line[0] != "inherit":
+          continue
+        graph.setdefault(line[1], list()).append(line[2])
+
+  PrintNodes(graph, root, "")
+
+
+if __name__ == "__main__":
+  main(sys.argv)
+
+# vim: set expandtab ts=2 sw=2 sts=2:
+
diff --git a/tools/product_config/src/com/android/build/config/CommandException.java b/tools/product_config/src/com/android/build/config/CommandException.java
new file mode 100644
index 0000000..f1a2c39
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/CommandException.java
@@ -0,0 +1,39 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+/**
+ * Exception to indicate that a fatal error has occurred.  Throwing this
+ * will cause errors to be printed, cleanup to occur, and the command to
+ * exit with a failure code.
+ *
+ * These are user errors. Throwing other exceptions will result in
+ * the stack trace being shown.
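+ *
+ * Typical use (illustrative):
+ *   throw new CommandException("Unknown command: " + commandName);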
+ */
+public class CommandException extends RuntimeException {
+    public CommandException() {
+        super();
+    }
+
+    public CommandException(String message) {
+        super(message);
+    }
+
+    public CommandException(String message, Throwable chain) {
+        super(message, chain);
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/ConfigBase.java b/tools/product_config/src/com/android/build/config/ConfigBase.java
new file mode 100644
index 0000000..9a81011
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/ConfigBase.java
@@ -0,0 +1,112 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Common parts between MakeConfig and the to-be-added GenericConfig, BazelConfig and SoongConfig.
+ */
+public class ConfigBase {
+    protected String mPhase;
+    protected List<String> mRootNodes;
+
+    /**
+     * State of the make variable environment from before the first config file.
+     */
+    protected Map<String, Str> mInitialVariables = new HashMap();
+
+    /**
+     * State of the make variable environment from after the first config file.
+     */
+    protected Map<String, Str> mFinalVariables = new HashMap();
+
+
+    /**
+     * The variables that are handled specially.
+     */
+    protected final TreeMap<String, VarType> mProductVars = new TreeMap();
+
+    public void setPhase(String phase) {
+        mPhase = phase;
+    }
+
+    public String getPhase() {
+        return mPhase;
+    }
+
+    public void setRootNodes(List<String> filenames) {
+        mRootNodes = new ArrayList(filenames);
+    }
+
+    public List<String> getRootNodes() {
+        return mRootNodes;
+    }
+
+    public void addProductVar(String name, VarType type) {
+        mProductVars.put(name, type);
+    }
+
+    public TreeMap<String, VarType> getProductVars() {
+        return mProductVars;
+    }
+
+    public VarType getVarType(String name) {
+        final VarType t = mProductVars.get(name);
+        if (t != null) {
+            return t;
+        } else {
+            return VarType.UNKNOWN;
+        }
+    }
+
+    public boolean isProductVar(String name) {
+        return mProductVars.get(name) != null;
+    }
+
+    /**
+     * Return the state of the make variable environment from before the first config file.
+     */
+    public Map<String, Str> getInitialVariables() {
+        return mInitialVariables;
+    }
+
+    /**
+     * Return the state of the make variable environment from after the first config file.
+     */
+    public Map<String, Str> getFinalVariables() {
+        return mFinalVariables;
+    }
+
+    /**
+     * Copy common base class fields from that to this.
+     */
+    public void copyFrom(ConfigBase that) {
+        setPhase(that.getPhase());
+        setRootNodes(that.getRootNodes());
+        for (Map.Entry<String, VarType> entry: that.getProductVars().entrySet()) {
+            addProductVar(entry.getKey(), entry.getValue());
+        }
+        mInitialVariables = new HashMap(that.getInitialVariables());
+        mFinalVariables = new HashMap(that.getFinalVariables());
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/ConvertMakeToGenericConfig.java b/tools/product_config/src/com/android/build/config/ConvertMakeToGenericConfig.java
new file mode 100644
index 0000000..39bd5df
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/ConvertMakeToGenericConfig.java
@@ -0,0 +1,235 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Converts a MakeConfig into a Generic config by applying heuristics about
+ * the types of variable assignments that we do.
+ */
+public class ConvertMakeToGenericConfig {
+    private final Errors mErrors;
+
+    public ConvertMakeToGenericConfig(Errors errors) {
+        mErrors = errors;
+    }
+
+    public GenericConfig convert(Map<String, MakeConfig> make) {
+        final GenericConfig result = new GenericConfig();
+
+        final MakeConfig products = make.get("PRODUCTS");
+        if (products == null) {
+            mErrors.ERROR_DUMPCONFIG.add("Could not find PRODUCTS phase in dumpconfig output.");
+            return null;
+        }
+
+        // Base class fields
+        result.copyFrom(products);
+
+        // Each file
+        for (MakeConfig.ConfigFile f: products.getConfigFiles()) {
+            final GenericConfig.ConfigFile genericFile
+                    = new GenericConfig.ConfigFile(f.getFilename());
+            result.addConfigFile(genericFile);
+
+            final List<MakeConfig.Block> blocks = f.getBlocks();
+
+            // Some assertions:
+            // TODO: Include better context for these errors.
+            // There should always be at least a BEGIN and an AFTER, so assert this.
+            if (blocks.size() < 2) {
+                throw new RuntimeException("expected at least blocks.size() >= 2. Actcual size: "
+                        + blocks.size());
+            }
+            if (blocks.get(0).getBlockType() != MakeConfig.BlockType.BEFORE) {
+                throw new RuntimeException("expected first block to be BEFORE");
+            }
+            if (blocks.get(blocks.size() - 1).getBlockType() != MakeConfig.BlockType.AFTER) {
+                throw new RuntimeException("expected first block to be AFTER");
+            }
+            // Everything in between should be an INHERIT block.
+            for (int index = 1; index < blocks.size() - 1; index++) {
+                if (blocks.get(index).getBlockType() != MakeConfig.BlockType.INHERIT) {
+                    throw new RuntimeException("expected INHERIT at block " + index);
+                }
+            }
+
+            // Each block represents a snapshot of the interpreter variable state (minus a few big
+            // sets of variables which we don't export because they're used in the internals
+            // of node_fns.mk, so we know they're not necessary here). The first (BEFORE) one
+            // is everything that is set before the file is included, so it forms the base
+            // for everything else.
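+            // For example (illustrative): a product makefile that inherits two
+            // other makefiles produces the block sequence BEFORE, INHERIT,
+            // INHERIT, AFTER, where each INHERIT block is a snapshot of the
+            // variable state at the point of that inherit-product call.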
+            MakeConfig.Block prevBlock = blocks.get(0);
+
+            for (int index = 1; index < blocks.size(); index++) {
+                final MakeConfig.Block block = blocks.get(index);
+                for (final Map.Entry<String, Str> entry: block.getVars().entrySet()) {
+                    final String varName = entry.getKey();
+                    final GenericConfig.Assign assign = convertAssignment(block.getBlockType(),
+                            block.getInheritedFile(), products.getVarType(varName), varName,
+                            entry.getValue(), prevBlock.getVar(varName));
+                    if (assign != null) {
+                        genericFile.addStatement(assign);
+                    }
+                }
+                // Handle variables that are in prevBlock but not block -- they were
+                // deleted. Is this even possible, or do they show up as ""?  We will
+                // treat them as positive assignments to the empty string.
+                for (String prevName: prevBlock.getVars().keySet()) {
+                    if (!block.getVars().containsKey(prevName)) {
+                        genericFile.addStatement(
+                                new GenericConfig.Assign(prevName, new Str("")));
+                    }
+                }
+                if (block.getBlockType() == MakeConfig.BlockType.INHERIT) {
+                    genericFile.addStatement(
+                            new GenericConfig.Inherit(block.getInheritedFile()));
+                }
+                // For next iteration
+                prevBlock = block;
+            }
+        }
+
+        // Overwrite the final variables with the ones that come from the PRODUCT-EXPAND phase.
+        // Drop the ones that were newly defined between the two phases, but leave values
+        // that were modified in between.  We do need to reproduce that logic in this tool.
+        final MakeConfig expand = make.get("PRODUCT-EXPAND");
+        if (expand == null) {
+            mErrors.ERROR_DUMPCONFIG.add("Could not find PRODUCT-EXPAND phase in dumpconfig"
+                    + " output.");
+            return null;
+        }
+        final Map<String, Str> productsFinal = products.getFinalVariables();
+        final Map<String, Str> expandInitial = expand.getInitialVariables();
+        final Map<String, Str> expandFinal = expand.getFinalVariables();
+        final Map<String, Str> finalFinal = result.getFinalVariables();
+        finalFinal.clear();
+        for (Map.Entry<String, Str> var: expandFinal.entrySet()) {
+            final String varName = var.getKey();
+            if (expandInitial.containsKey(varName) && !productsFinal.containsKey(varName)) {
+                continue;
+            }
+            finalFinal.put(varName, var.getValue());
+        }
+
+        return result;
+    }
+
+    /**
+     * Converts one variable from a MakeConfig Block into a GenericConfig Assignment.
+     */
+    GenericConfig.Assign convertAssignment(MakeConfig.BlockType blockType, Str inheritedFile,
+            VarType varType, String varName, Str varVal, Str prevVal) {
+        if (prevVal == null) {
+            // New variable.
+            return new GenericConfig.Assign(varName, varVal);
+        } else if (!varVal.equals(prevVal)) {
+            // The value changed from the last block.
+            if (varVal.length() == 0) {
+                // It was set to empty
+                return new GenericConfig.Assign(varName, varVal);
+            } else {
+                // Product vars have the @inherit processing. Other vars we
+                // will just ignore and put in one section at the end, based
+                // on the difference between the BEFORE and AFTER blocks.
+                if (varType == VarType.UNKNOWN) {
+                    if (blockType == MakeConfig.BlockType.AFTER) {
+                        // For UNKNOWN variables, we don't worry about the
+                        // intermediate steps, just take the final value.
+                        return new GenericConfig.Assign(varName, varVal);
+                    } else {
+                        return null;
+                    }
+                } else {
+                    return convertInheritedVar(blockType, inheritedFile,
+                            varName, varVal, prevVal);
+                }
+            }
+        } else {
+            // Variable not touched
+            return null;
+        }
+    }
+
+    /**
+     * Handle the special inherited values, where the inherit-product puts in the
+     * @inherit:... markers, adding Statements to the ConfigFile.
+     */
+    GenericConfig.Assign convertInheritedVar(MakeConfig.BlockType blockType, Str inheritedFile,
+            String varName, Str varVal, Str prevVal) {
+        String varText = varVal.toString();
+        String prevText = prevVal.toString().trim();
+        if (blockType == MakeConfig.BlockType.INHERIT) {
+            // inherit-product appends @inherit:... so drop that.
+            final String marker = "@inherit:" + inheritedFile;
+            if (varText.endsWith(marker)) {
+                varText = varText.substring(0, varText.length() - marker.length()).trim();
+            } else {
+                mErrors.ERROR_IMPROPER_PRODUCT_VAR_MARKER.add(varVal.getPosition(),
+                        "Variable didn't end with marker \"" + marker + "\": " + varText);
+            }
+        }
+
+        if (!varText.equals(prevText)) {
+            // If the variable value was actually changed.
+            final ArrayList<String> words = split(varText, prevText);
+            if (words.size() == 0) {
+                // Pure Assignment, none of the previous value is present.
+                return new GenericConfig.Assign(varName, new Str(varVal.getPosition(), varText));
+            } else {
+                // Self referential value (prepend, append, both).
+                if (words.size() > 2) {
+                    // This is indicative of a construction that might not be quite
+                    // what we want.  The code above does something that works if the
+                    // original was of the form "VAR := a $(VAR) b $(VAR) c", but if it
+                    // was something else, this won't work. This doesn't happen in AOSP,
+                    // but it's a theoretical possibility, so someone might do it.
+                    mErrors.WARNING_VARIABLE_RECURSION.add(varVal.getPosition(),
+                            "Possible unsupported variable recursion: "
+                                + varName + " = " + varVal + " (prev=" + prevVal + ")");
+                }
+                return new GenericConfig.Assign(varName, Str.toList(varVal.getPosition(), words));
+            }
+        } else {
+            // Variable not touched
+            return null;
+        }
+    }
+
+    /**
+     * Split 'haystack' on occurrences of 'needle'. Trims each string of whitespace
+     * to preserve make list semantics.
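+     *
+     * For example (illustrative): split("libfoo libbar libbaz", "libbar")
+     * returns ["libfoo", "libbaz"].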
+     */
+    private static ArrayList<String> split(String haystack, String needle) {
+        final ArrayList<String> result = new ArrayList();
+        final int needleLen = needle.length();
+        if (needleLen == 0) {
+            return result;
+        }
+        int start = 0;
+        int end;
+        while ((end = haystack.indexOf(needle, start)) >= 0) {
+            result.add(haystack.substring(start, end).trim());
+            start = end + needleLen;
+        }
+        result.add(haystack.substring(start).trim());
+        return result;
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/CsvParser.java b/tools/product_config/src/com/android/build/config/CsvParser.java
new file mode 100644
index 0000000..1c8b9c3
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/CsvParser.java
@@ -0,0 +1,242 @@
+
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.android.build.config;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * A CSV parser.
+ */
+public class CsvParser {
+    /**
+     * Internal string buffer grows by this amount.
+     */
+    private static final int CHUNK_SIZE = 64 * 1024;
+
+    /**
+     * Error parsing.
+     */
+    public static class ParseException extends Exception {
+        private int mLine;
+        private int mColumn;
+
+        public ParseException(int line, int column, String message) {
+            super(message);
+            mLine = line;
+            mColumn = column;
+        }
+
+        /**
+         * Line number in source file.
+         */
+        public int getLine() {
+            return mLine;
+        }
+
+        /**
+         * Column in source file.
+         */
+        public int getColumn() {
+            return mColumn;
+        }
+    }
+
+    public static class Line {
+        private final int mLineNumber;
+        private final List<String> mFields;
+
+        Line(int lineno, List<String> fields) {
+            mLineNumber = lineno;
+            mFields = fields;
+        }
+
+        public int getLine() {
+            return mLineNumber;
+        }
+
+        public List<String> getFields() {
+            return mFields;
+        }
+    }
+
+    // Parser States
+    private static final int STATE_START_LINE = 0;
+    private static final int STATE_START_FIELD = 1;
+    private static final int STATE_INSIDE_QUOTED_FIELD = 2;
+    private static final int STATE_FIRST_QUOTATION_MARK = 3;
+    private static final int STATE_INSIDE_UNQUOTED_FIELD = 4;
+    private static final int STATE_DONE = 5;
+
+    // Parser Actions
+    private static final int ACTION_APPEND_CHAR = 1;
+    private static final int ACTION_FIELD_COMPLETE = 2;
+    private static final int ACTION_LINE_COMPLETE = 4;
+
+    /**
+     * Constructor.
+     */
+    private CsvParser() {
+    }
+
+    /**
+     * Reads CSV and returns a list of Line objects.
+     *
+     * Handles newlines inside fields quoted with double quotes (").
+     *
+     * Doesn't report blank lines, but does include empty fields.
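+     *
+     * For example (illustrative): the single input line
+     *   a,"b,1",c
+     * produces one Line whose fields are ["a", "b,1", "c"].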
+     */
+    public static List<Line> parse(Reader reader)
+            throws ParseException, IOException {
+        ArrayList<Line> result = new ArrayList();
+        int line = 1;
+        int column = 1;
+        int pos = 0;
+        char[] buf = new char[CHUNK_SIZE];
+        HashMap<String,String> stringPool = new HashMap();
+        ArrayList<String> fields = new ArrayList();
+
+        int state = STATE_START_LINE;
+        while (state != STATE_DONE) {
+            int c = reader.read();
+            int action = 0;
+
+            if (state == STATE_START_LINE) {
+                if (c <= 0) {
+                    // No data, skip ACTION_LINE_COMPLETE.
+                    state = STATE_DONE;
+                } else if (c == '"') {
+                    state = STATE_INSIDE_QUOTED_FIELD;
+                } else if (c == ',') {
+                    action = ACTION_FIELD_COMPLETE;
+                    state = STATE_START_FIELD;
+                } else if (c == '\n') {
+                    // Consume the newline, state stays STATE_START_LINE.
+                } else {
+                    action = ACTION_APPEND_CHAR;
+                    state = STATE_INSIDE_UNQUOTED_FIELD;
+                }
+            } else if (state == STATE_START_FIELD) {
+                if (c <= 0) {
+                    // Field will be empty
+                    action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+                    state = STATE_DONE;
+                } else if (c == '"') {
+                    state = STATE_INSIDE_QUOTED_FIELD;
+                } else if (c == ',') {
+                    action = ACTION_FIELD_COMPLETE;
+                    state = STATE_START_FIELD;
+                } else if (c == '\n') {
+                    action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+                    state = STATE_START_LINE;
+                } else {
+                    action = ACTION_APPEND_CHAR;
+                    state = STATE_INSIDE_UNQUOTED_FIELD;
+                }
+            } else if (state == STATE_INSIDE_QUOTED_FIELD) {
+                if (c <= 0) {
+                    throw new ParseException(line, column,
+                            "Bad input: End of input inside quoted field.");
+                } else if (c == '"') {
+                    state = STATE_FIRST_QUOTATION_MARK;
+                } else {
+                    action = ACTION_APPEND_CHAR;
+                }
+            } else if (state == STATE_FIRST_QUOTATION_MARK) {
+                if (c <= 0) {
+                    action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+                    state = STATE_DONE;
+                } else if (c == '"') {
+                    action = ACTION_APPEND_CHAR;
+                    state = STATE_INSIDE_QUOTED_FIELD;
+                } else if (c == ',') {
+                    action = ACTION_FIELD_COMPLETE;
+                    state = STATE_START_FIELD;
+                } else if (c == '\n') {
+                    action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+                    state = STATE_START_LINE;
+                } else {
+                    throw new ParseException(line, column,
+                            "Bad input: Character after field ended or unquoted '\"'.");
+                }
+            } else if (state == STATE_INSIDE_UNQUOTED_FIELD) {
+                if (c <= 0) {
+                    action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+                    state = STATE_DONE;
+                } else if (c == ',') {
+                    action = ACTION_FIELD_COMPLETE;
+                    state = STATE_START_FIELD;
+                } else if (c == '\n') {
+                    action = ACTION_FIELD_COMPLETE | ACTION_LINE_COMPLETE;
+                    state = STATE_START_LINE;
+                } else {
+                    action = ACTION_APPEND_CHAR;
+                }
+            }
+
+            if ((action & ACTION_APPEND_CHAR) != 0) {
+                // Reallocate buffer if necessary. Hopefully not often because CHUNK_SIZE is big.
+                if (pos >= buf.length) {
+                    char[] old = buf;
+                    buf = new char[old.length + CHUNK_SIZE];
+                    System.arraycopy(old, 0, buf, 0, old.length);
+                }
+                // Store the character
+                buf[pos] = (char)c;
+                pos++;
+            }
+            if ((action & ACTION_FIELD_COMPLETE) != 0) {
+                // A lot of the strings are duplicated, so pool them to reduce peak memory
+                // usage. This could be made slightly better by having a custom key class
+                // that does the lookup without making a new String that gets immediately
+                // thrown away.
+                String field = new String(buf, 0, pos);
+                final String cached = stringPool.get(field);
+                if (cached == null) {
+                    stringPool.put(field, field);
+                } else {
+                    field = cached;
+                }
+                fields.add(field);
+                pos = 0;
+            }
+            if ((action & ACTION_LINE_COMPLETE) != 0) {
+                // Only report lines with any contents
+                if (fields.size() > 0) {
+                    result.add(new Line(line, fields));
+                    fields = new ArrayList();
+                }
+            }
+
+            if (c == '\n') {
+                line++;
+                column = 1;
+            } else {
+                column++;
+            }
+        }
+
+        return result;
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/DumpConfigParser.java b/tools/product_config/src/com/android/build/config/DumpConfigParser.java
new file mode 100644
index 0000000..c4cd963
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/DumpConfigParser.java
@@ -0,0 +1,322 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.android.build.config;
+
+import java.io.IOException;
+import java.io.Reader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.regex.Pattern;
+
+/**
+ * Parses the output of ckati building build/make/core/dumpconfig.mk.
+ *
+ * The format is as follows:
+ *   - All processed lines are comma (',') separated fields (see CsvParser).
+ *   - Lines before the dumpconfig_version line are dropped for forward compatibility
+ *   - Lines where the first field is config_var describe variables declared in makefiles
+ *     (implemented by the dump-config-vals macro)
+ *          Field   Description
+ *          0       "config_var" row type
+ *          1       Product makefile being processed
+ *          2       The variable name
+ *          3       The value of the variable
+ *          4       The location of the variable, as best tracked by kati
+ */
+public class DumpConfigParser {
+    private static final boolean DEBUG = false;
+
+    private final Errors mErrors;
+    private final String mFilename;
+    private final Reader mReader;
+
+    private final Map<String,MakeConfig> mResults = new HashMap();
+
+    private static final Pattern LIST_SEPARATOR = Pattern.compile("\\s+");
+
+    /**
+     * Constructor.
+     */
+    private DumpConfigParser(Errors errors, String filename, Reader reader) {
+        mErrors = errors;
+        mFilename = filename;
+        mReader = reader;
+    }
+
+    /**
+     * Parse the text into a map of the phase names to MakeConfig objects.
+     */
+    public static Map<String,MakeConfig> parse(Errors errors, String filename, Reader reader)
+            throws CsvParser.ParseException, IOException {
+        DumpConfigParser parser = new DumpConfigParser(errors, filename, reader);
+        parser.parseImpl();
+        return parser.mResults;
+    }
+
+    /**
+     * Parse the input.
+     */
+    private void parseImpl() throws CsvParser.ParseException, IOException {
+        final List<CsvParser.Line> lines = CsvParser.parse(mReader);
+        final int lineCount = lines.size();
+        int index = 0;
+
+        int dumpconfigVersion = 0;
+
+        // Ignore lines until we get a dumpconfig_version line, for forward compatibility.
+        // In a previous life, this loop parsed all of kati's stdout, not just the file
+        // that dumpconfig.mk writes, but it's harmless to leave it in. It gives us a
+        // little extra flexibility that we probably won't need, since this tool is
+        // unlikely to diverge from dumpconfig.mk.
+        for (; index < lineCount; index++) {
+            final CsvParser.Line line = lines.get(index);
+            final List<String> fields = line.getFields();
+
+            if (matchLineType(line, "dumpconfig_version", 1)) {
+                try {
+                    dumpconfigVersion = Integer.parseInt(fields.get(1));
+                } catch (NumberFormatException ex) {
+                    mErrors.WARNING_DUMPCONFIG.add(
+                            new Position(mFilename, line.getLine()),
+                            "Couldn't parse dumpconfig_version: " + fields.get(1));
+                }
+                break;
+            }
+        }
+
+        // If we never saw dumpconfig_version, there's a problem with the command, so stop.
+        if (dumpconfigVersion == 0) {
+            mErrors.ERROR_DUMPCONFIG.fatal(
+                    new Position(mFilename),
+                    "Never saw a valid dumpconfig_version line.");
+        }
+
+        // Any lines before the start signal will be dropped. We create garbage objects
+        // here to avoid having to check for null everywhere.
+        MakeConfig makeConfig = new MakeConfig();
+        MakeConfig.ConfigFile configFile = new MakeConfig.ConfigFile("<ignored>");
+        MakeConfig.Block block = new MakeConfig.Block(MakeConfig.BlockType.UNSET);
+        Map<String, Str> initialVariables = new HashMap();
+        Map<String, Str> finalVariables = new HashMap();
+
+        // Parse the rest of the lines, which describe each phase and its variables.
+        for (; index < lineCount; index++) {
+            final CsvParser.Line line = lines.get(index);
+            final List<String> fields = line.getFields();
+            final String lineType = fields.get(0);
+
+            if (matchLineType(line, "phase", 2)) {
+                // Start the new one
+                makeConfig = new MakeConfig();
+                makeConfig.setPhase(fields.get(1));
+                makeConfig.setRootNodes(splitList(fields.get(2)));
+                // If there is a duplicate phase of the same name, continue parsing, but
+                // don't add it.  Emit a warning.
+                if (!mResults.containsKey(makeConfig.getPhase())) {
+                    mResults.put(makeConfig.getPhase(), makeConfig);
+                } else {
+                    mErrors.WARNING_DUMPCONFIG.add(
+                            new Position(mFilename, line.getLine()),
+                            "Duplicate phase: " + makeConfig.getPhase()
+                                + ". This one will be dropped.");
+                }
+                initialVariables = makeConfig.getInitialVariables();
+                finalVariables = makeConfig.getFinalVariables();
+
+                if (DEBUG) {
+                    System.out.println("PHASE:");
+                    System.out.println("  " + makeConfig.getPhase());
+                    System.out.println("  " + makeConfig.getRootNodes());
+                }
+            } else if (matchLineType(line, "var", 2)) {
+                final VarType type = "list".equals(fields.get(1)) ? VarType.LIST : VarType.SINGLE;
+                makeConfig.addProductVar(fields.get(2), type);
+
+                if (DEBUG) {
+                    System.out.println("  VAR: " + type + " " + fields.get(2));
+                }
+            } else if (matchLineType(line, "import", 1)) {
+                final List<String> importStack = splitList(fields.get(1));
+                if (importStack.size() == 0) {
+                    mErrors.WARNING_DUMPCONFIG.add(
+                            new Position(mFilename, line.getLine()),
+                            "'import' line with empty include stack.");
+                    continue;
+                }
+
+                // The beginning of importing a new file.
+                configFile = new MakeConfig.ConfigFile(importStack.get(0));
+                if (makeConfig.addConfigFile(configFile) != null) {
+                    mErrors.WARNING_DUMPCONFIG.add(
+                            new Position(mFilename, line.getLine()),
+                            "Duplicate file imported in section: " + configFile.getFilename());
+                }
+                // We expect a Variable block next.
+                block = new MakeConfig.Block(MakeConfig.BlockType.BEFORE);
+                configFile.addBlock(block);
+
+                if (DEBUG) {
+                    System.out.println("  IMPORT: " + configFile.getFilename());
+                }
+            } else if (matchLineType(line, "inherit", 2)) {
+                final String currentFile = fields.get(1);
+                final String inheritedFile = fields.get(2);
+                if (!configFile.getFilename().equals(currentFile)) {
+                    mErrors.WARNING_DUMPCONFIG.add(
+                            new Position(mFilename, line.getLine()),
+                            "Unexpected current file in 'inherit' line '" + currentFile
+                                + "' while processing '" + configFile.getFilename() + "'");
+                    continue;
+                }
+
+                // There is already a file in progress, so add another var block to that.
+                block = new MakeConfig.Block(MakeConfig.BlockType.INHERIT);
+                // TODO: Make dumpconfig.mk also output a Position for inherit-product
+                block.setInheritedFile(new Str(inheritedFile));
+                configFile.addBlock(block);
+
+                if (DEBUG) {
+                    System.out.println("  INHERIT: " + inheritedFile);
+                }
+            } else if (matchLineType(line, "imported", 1)) {
+                final List<String> importStack = splitList(fields.get(1));
+                if (importStack.size() == 0) {
+                    mErrors.WARNING_DUMPCONFIG.add(
+                            new Position(mFilename, line.getLine()),
+                            "'imported' line with empty include stack.");
+                    continue;
+                }
+                final String currentFile = importStack.get(0);
+                if (!configFile.getFilename().equals(currentFile)) {
+                    mErrors.WARNING_DUMPCONFIG.add(
+                            new Position(mFilename, line.getLine()),
+                            "Unexpected current file in 'imported' line '" + currentFile
+                                + "' while processing '" + configFile.getFilename() + "'");
+                    continue;
+                }
+
+                // There is already a file in progress, so add another var block to that.
+                // This will be the last one, but will check that after parsing.
+                block = new MakeConfig.Block(MakeConfig.BlockType.AFTER);
+                configFile.addBlock(block);
+
+                if (DEBUG) {
+                    System.out.println("  AFTER: " + currentFile);
+                }
+            } else if (matchLineType(line, "val", 5)) {
+                final String productMakefile = fields.get(1);
+                final String blockTypeString = fields.get(2);
+                final String varName = fields.get(3);
+                final String varValue = fields.get(4);
+                final Position pos = Position.parse(fields.get(5));
+                final Str str = new Str(pos, varValue);
+
+                if (blockTypeString.equals("initial")) {
+                    initialVariables.put(varName, str);
+                } else if (blockTypeString.equals("final")) {
+                    finalVariables.put(varName, str);
+                } else {
+                    if (!productMakefile.equals(configFile.getFilename())) {
+                        mErrors.WARNING_DUMPCONFIG.add(
+                                new Position(mFilename, line.getLine()),
+                                "Mismatched 'val' product makefile."
+                                    + " Expected: " + configFile.getFilename()
+                                    + " Saw: " + productMakefile);
+                        continue;
+                    }
+
+                    final MakeConfig.BlockType blockType = parseBlockType(line, blockTypeString);
+                    if (blockType == null) {
+                        continue;
+                    }
+                    if (blockType != block.getBlockType()) {
+                        mErrors.WARNING_DUMPCONFIG.add(
+                                new Position(mFilename, line.getLine()),
+                                "Mismatched 'val' block type."
+                                    + " Expected: " + block.getBlockType()
+                                    + " Saw: " + blockType);
+                    }
+
+                    // Add the variable to the block in progress
+                    block.addVar(varName, str);
+                }
+            } else {
+                if (DEBUG) {
+                    System.out.print("# ");
+                    for (int d = 0; d < fields.size(); d++) {
+                        System.out.print(fields.get(d));
+                        if (d != fields.size() - 1) {
+                            System.out.print(",");
+                        }
+                    }
+                    System.out.println();
+                }
+            }
+        }
+    }
+
+    /**
+     * Return true if the line type matches 'lineType' and there are at least 'fieldCount'
+     * fields (not including the first field which is the line type).
+     */
+    private boolean matchLineType(CsvParser.Line line, String lineType, int fieldCount) {
+        final List<String> fields = line.getFields();
+        if (!lineType.equals(fields.get(0))) {
+            return false;
+        }
+        if (fields.size() < (fieldCount + 1)) {
+            mErrors.WARNING_DUMPCONFIG.add(new Position(mFilename, line.getLine()),
+                    fields.get(0) + " line has " + fields.size() + " fields. Expected at least "
+                    + (fieldCount + 1) + " fields.");
+            return false;
+        }
+        return true;
+    }
+
+    /**
+     * Split a string with space separated items (i.e. the make list format) into a List<String>.
+     */
+    private static List<String> splitList(String text) {
+        // Arrays.asList returns a fixed-length List, so we copy it into an ArrayList to not
+        // propagate that surprise detail downstream.
+        return new ArrayList(Arrays.asList(LIST_SEPARATOR.split(text.trim())));
+    }
+
+    /**
+     * Parse a BlockType or issue a warning if it can't be parsed.
+     */
+    private MakeConfig.BlockType parseBlockType(CsvParser.Line line, String text) {
+        if ("before".equals(text)) {
+            return MakeConfig.BlockType.BEFORE;
+        } else if ("inherit".equals(text)) {
+            return MakeConfig.BlockType.INHERIT;
+        } else if ("after".equals(text)) {
+            return MakeConfig.BlockType.AFTER;
+        } else {
+            mErrors.WARNING_DUMPCONFIG.add(
+                    new Position(mFilename, line.getLine()),
+                    "Invalid block type: " + text);
+            return null;
+        }
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/ErrorReporter.java b/tools/product_config/src/com/android/build/config/ErrorReporter.java
new file mode 100644
index 0000000..0a0c9f4
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/ErrorReporter.java
@@ -0,0 +1,303 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.lang.reflect.Field;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Base class for reporting errors.
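+ * <p>
+ * A usage sketch (illustrative only; Errors is the concrete subclass defined in this
+ * tool, and Position is its source-location class):
+ * <pre>
+ *   Errors errors = new Errors();
+ *   errors.WARNING_DUMPCONFIG.add(new Position("dump.csv", 12), "Unexpected line type.");
+ *   if (errors.hadWarningOrError()) {
+ *       errors.printErrors(System.err);
+ *   }
+ * </pre>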
+ */
+public class ErrorReporter {
+    /**
+     * List of Entries that have occurred.
+     */
+    // Also used as the lock for this object.
+    private final ArrayList<Entry> mEntries = new ArrayList();
+
+    /**
+     * The categories defined for this Errors object.
+     */
+    private Map<Integer, Category> mCategories;
+
+    /**
+     * Whether there has been a warning or an error yet.
+     */
+    private boolean mHadWarningOrError;
+
+    /**
+     * Whether there has been an error yet.
+     */
+    private boolean mHadError;
+
+    public static class FatalException extends RuntimeException {
+        FatalException(String message) {
+            super(message);
+        }
+
+        FatalException(String message, Throwable chain) {
+            super(message, chain);
+        }
+    }
+
+    /**
+     * Whether errors are errors, warnings or hidden.
+     */
+    public static enum Level {
+        HIDDEN("hidden"),
+        WARNING("warning"),
+        ERROR("error");
+
+        private final String mLabel;
+
+        Level(String label) {
+            mLabel = label;
+        }
+
+        String getLabel() {
+            return mLabel;
+        }
+    }
+
+    /**
+     * An error category: a numeric code, a severity level, and help text.
+     */
+    public class Category {
+        private final int mCode;
+        private boolean mIsLevelSettable;
+        private Level mLevel;
+        private String mHelp;
+
+        /**
+         * Construct a Category object.
+         */
+        public Category(int code, boolean isLevelSettable, Level level, String help) {
+            if (!isLevelSettable && level != Level.ERROR) {
+                throw new RuntimeException("Don't have WARNING or HIDDEN without isLevelSettable");
+            }
+            mCode = code;
+            mIsLevelSettable = isLevelSettable;
+            mLevel = level;
+            mHelp = help;
+        }
+
+        /**
+         * Get the numeric code for the Category, which can be used to set the level.
+         */
+        public int getCode() {
+            return mCode;
+        }
+
+        /**
+         * Get whether the level of this Category can be changed.
+         */
+        public boolean isLevelSettable() {
+            return mIsLevelSettable;
+        }
+
+        /**
+         * Set the level of this category.
+         */
+        public void setLevel(Level level) {
+            if (!mIsLevelSettable) {
+                throw new RuntimeException("Can't set level for error " + mCode);
+            }
+            mLevel = level;
+        }
+
+        /**
+         * Return the level, including any overrides.
+         */
+        public Level getLevel() {
+            return mLevel;
+        }
+
+        /**
+         * Return the category's help text.
+         */
+        public String getHelp() {
+            return mHelp;
+        }
+
+        /**
+         * Add an error with no source position.
+         */
+        public void add(String message) {
+            ErrorReporter.this.add(this, false, new Position(), message);
+        }
+
+        /**
+         * Add an error.
+         */
+        public void add(Position pos, String message) {
+            ErrorReporter.this.add(this, false, pos, message);
+        }
+
+        /**
+         * Add an error with no source position, and throw a FatalException, stopping processing
+         * immediately.
+         */
+        public void fatal(String message) {
+            ErrorReporter.this.add(this, true, new Position(), message);
+        }
+
+        /**
+         * Add an error, and throw a FatalException, stopping processing immediately.
+         */
+        public void fatal(Position pos, String message) {
+            ErrorReporter.this.add(this, true, pos, message);
+        }
+    }
+
+    /**
+     * An instance of an error happening.
+     */
+    public static class Entry {
+        private final Category mCategory;
+        private final Position mPosition;
+        private final String mMessage;
+
+        Entry(Category category, Position position, String message) {
+            mCategory = category;
+            mPosition = position;
+            mMessage = message;
+        }
+
+        public Category getCategory() {
+            return mCategory;
+        }
+
+        public Position getPosition() {
+            return mPosition;
+        }
+
+        public String getMessage() {
+            return mMessage;
+        }
+
+        @Override
+        public String toString() {
+            return mPosition
+                    + "[" + mCategory.getLevel().getLabel() + " " + mCategory.getCode() + "] "
+                    + mMessage;
+        }
+    }
+
+    private void initLocked() {
+        if (mCategories == null) {
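+            // Use reflection to find the public Category fields declared on the concrete
+            // subclass (e.g. Errors) and index them by their numeric code.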
+            HashMap<Integer, Category> categories = new HashMap();
+            for (Field field: getClass().getFields()) {
+                if (Category.class.isAssignableFrom(field.getType())) {
+                    Category category = null;
+                    try {
+                        category = (Category)field.get(this);
+                    } catch (IllegalAccessException ex) {
+                        // Wrap and rethrow. The fields are always on this class, so it's
+                        // our programming error if this happens.
+                        throw new RuntimeException("Categories on Errors should be public.", ex);
+                    }
+                    Category prev = categories.put(category.getCode(), category);
+                    if (prev != null) {
+                        throw new RuntimeException("Duplicate categories with code "
+                                + category.getCode());
+                    }
+                }
+            }
+            mCategories = Collections.unmodifiableMap(categories);
+        }
+    }
+
+    /**
+     * Returns a map of the category codes to the categories.
+     */
+    public Map<Integer, Category> getCategories() {
+        synchronized (mEntries) {
+            initLocked();
+            return mCategories;
+        }
+    }
+
+    /**
+     * Add an error.
+     */
+    private void add(Category category, boolean fatal, Position pos, String message) {
+        synchronized (mEntries) {
+            initLocked();
+            if (mCategories.get(category.getCode()) != category) {
+                throw new RuntimeException("Errors.Category used from the wrong Errors object.");
+            }
+            final Entry entry = new Entry(category, pos, message);
+            mEntries.add(entry);
+            final Level level = category.getLevel();
+            if (level == Level.WARNING || level == Level.ERROR) {
+                mHadWarningOrError = true;
+            }
+            if (level == Level.ERROR) {
+                mHadError = true;
+            }
+            if (fatal) {
+                throw new FatalException(entry.toString());
+            }
+        }
+    }
+
+    /**
+     * Returns whether there has been a warning or an error yet.
+     */
+    public boolean hadWarningOrError() {
+        synchronized (mEntries) {
+            return mHadWarningOrError;
+        }
+    }
+
+    /**
+     * Returns whether there has been an error yet.
+     */
+    public boolean hadError() {
+        synchronized (mEntries) {
+            return mHadError;
+        }
+    }
+
+    /**
+     * Returns a list of all entries that were added.
+     */
+    public List<Entry> getEntries() {
+        synchronized (mEntries) {
+            return new ArrayList<Entry>(mEntries);
+        }
+    }
+
+    /**
+     * Prints the errors.
+     */
+    public void printErrors(PrintStream out) {
+        synchronized (mEntries) {
+            for (Entry entry: mEntries) {
+                if (entry.getCategory().getLevel() == Level.HIDDEN) {
+                    continue;
+                }
+                out.println(entry.toString());
+            }
+        }
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/Errors.java b/tools/product_config/src/com/android/build/config/Errors.java
new file mode 100644
index 0000000..b333e78
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Errors.java
@@ -0,0 +1,74 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.lang.reflect.Field;
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+/**
+ * Error constants and error reporting.
+ * <p>
+ * <b>Naming Convention:</b>
+ * <ul>
+ *  <li>ERROR_ for Categories with isLevelSettable false and Level.ERROR
+ *  <li>WARNING_ for Categories with isLevelSettable true and default WARNING or HIDDEN
+ *  <li>Don't have isLevelSettable false with a level other than ERROR. (The constructor asserts this.)
+ * </ul>
+ */
+public class Errors extends ErrorReporter {
+
+    public final Category ERROR_COMMAND_LINE = new Category(1, false, Level.ERROR,
+            "Error on the command line.");
+
+    public final Category WARNING_UNKNOWN_COMMAND_LINE_ERROR = new Category(2, true, Level.HIDDEN,
+            "Passing unknown errors on the command line.  Hidden by default for\n"
+            + "forward compatibility.");
+
+    public final Category ERROR_KATI = new Category(3, false, Level.ERROR,
+            "Error executing or reading from Kati.");
+
+    public final Category WARNING_DUMPCONFIG = new Category(4, true, Level.WARNING,
+            "Anomaly parsing the output of kati and dumpconfig.mk.");
+
+    public final Category ERROR_DUMPCONFIG = new Category(5, false, Level.ERROR,
+            "Error parsing the output of kati and dumpconfig.mk.");
+
+    public final Category WARNING_VARIABLE_RECURSION = new Category(6, true, Level.WARNING,
+            "Possible unsupported variable recursion.");
+
+    // This could be a warning, but it's very likely that the data is corrupted somehow
+    // if we're seeing this.
+    public final Category ERROR_IMPROPER_PRODUCT_VAR_MARKER = new Category(7, true, Level.ERROR,
+            "Bad input from dumpvars causing corrupted product variables.");
+
+    public final Category ERROR_MISSING_CONFIG_FILE = new Category(8, true, Level.ERROR,
+            "Unable to find config file.");
+
+    public final Category ERROR_INFINITE_RECURSION = new Category(9, true, Level.ERROR,
+            "A file tries to inherit-product from itself or its own inherited products.");
+
+    // TODO: This will become obsolete when it is possible to have starlark-based product
+    // config files.
+    public final Category WARNING_DIFFERENT_FROM_KATI = new Category(1000, true, Level.WARNING,
+            "The cross-check with the original kati implementation failed.");
+
+}
diff --git a/tools/product_config/src/com/android/build/config/FlatConfig.java b/tools/product_config/src/com/android/build/config/FlatConfig.java
new file mode 100644
index 0000000..6f277fe
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/FlatConfig.java
@@ -0,0 +1,36 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Flattened configuration -- set of variables after all assignments and inherits have
+ * been executed.
+ */
+public class FlatConfig extends ConfigBase {
+
+    private final TreeMap<String, Value> mValues = new TreeMap();
+
+    public TreeMap<String, Value> getValues() {
+        return mValues;
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/FlattenConfig.java b/tools/product_config/src/com/android/build/config/FlattenConfig.java
new file mode 100644
index 0000000..a19802b
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/FlattenConfig.java
@@ -0,0 +1,474 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+import java.util.TreeMap;
+import java.util.TreeSet;
+import java.util.regex.Pattern;
+
+public class FlattenConfig {
+    private static final Pattern RE_SPACE = Pattern.compile("\\p{Space}+");
+    private static final String PRODUCTS_PREFIX = "PRODUCTS";
+
+    private final Errors mErrors;
+    private final GenericConfig mGenericConfig;
+    private final Map<String, GenericConfig.ConfigFile> mGenericConfigs;
+    private final FlatConfig mResult = new FlatConfig();
+    private final Map<String, Value> mVariables;
+    /**
+     * Files that have been visited, to prevent infinite recursion. There are no
+     * conditionals at this point in the processing, so we don't need a stack, just
+     * a single set.
+     */
+    private final Set<Str> mStack = new HashSet();
+
+
+    private FlattenConfig(Errors errors, GenericConfig genericConfig) {
+        mErrors = errors;
+        mGenericConfig = genericConfig;
+        mGenericConfigs = genericConfig.getFiles();
+        mVariables = mResult.getValues();
+
+        // Base class fields
+        mResult.copyFrom(genericConfig);
+    }
+
+    /**
+     * Flatten a GenericConfig to a FlatConfig.
+     *
+     * Makes three passes through the genericConfig, one to flatten the single variables,
+     * one to flatten the list variables, and one to flatten the unknown variables. Each
+     * has a slightly different algorithm.
+     */
+    public static FlatConfig flatten(Errors errors, GenericConfig genericConfig) {
+        final FlattenConfig flattener = new FlattenConfig(errors, genericConfig);
+        return flattener.flattenImpl();
+    }
+
+    private FlatConfig flattenImpl() {
+        final List<String> rootNodes = mGenericConfig.getRootNodes();
+        if (rootNodes.size() == 0) {
+            mErrors.ERROR_DUMPCONFIG.add("No root nodes in PRODUCTS phase.");
+            return null;
+        } else if (rootNodes.size() != 1) {
+            final StringBuilder msg = new StringBuilder(
+                    "Ignoring extra root nodes in PRODUCTS phase. All nodes are:");
+            for (final String rn: rootNodes) {
+                msg.append(' ');
+                msg.append(rn);
+            }
+            mErrors.WARNING_DUMPCONFIG.add(msg.toString());
+        }
+        final String root = rootNodes.get(0);
+
+        // TODO: Do we need to worry about the initial state of variables? Anything
+        // that comes from the product config?
+
+        flattenListVars(root);
+        flattenSingleVars(root);
+        flattenUnknownVars(root);
+        flattenInheritsFrom(root);
+
+        setDefaultKnownVars();
+
+        // TODO: This only supports the single product mode of import-nodes, which is all the
+        // real build does. m product-graph and friends will have to be rewritten.
+        mVariables.put("PRODUCTS", new Value(VarType.UNKNOWN, new Str(root)));
+
+        return mResult;
+    }
+
+    interface AssignCallback {
+        void onAssignStatement(GenericConfig.Assign assign);
+    }
+
+    interface InheritCallback {
+        void onInheritStatement(GenericConfig.Inherit assign);
+    }
+
+    /**
+     * Do a bunch of validity checks, and then iterate through each of the statements
+     * in the given file.  For Assignments, the callback is only called for variables
+     * matching varType.
+     *
+     * Adds makefiles which have been traversed to the 'seen' set, and will not traverse
+     * into an inherit statement if its makefile has already been seen.
+     */
+    private void forEachStatement(Str filename, VarType varType, Set<String> seen,
+            AssignCallback assigner, InheritCallback inheriter) {
+        if (mStack.contains(filename)) {
+            mErrors.ERROR_INFINITE_RECURSION.add(filename.getPosition(),
+                    "File is already in the inherit-product stack: " + filename);
+            return;
+        }
+
+        mStack.add(filename);
+        try {
+            final GenericConfig.ConfigFile genericFile = mGenericConfigs.get(filename.toString());
+
+            if (genericFile == null) {
+                mErrors.ERROR_MISSING_CONFIG_FILE.add(filename.getPosition(),
+                        "Unable to find config file: " + filename);
+                return;
+            }
+
+            for (final GenericConfig.Statement statement: genericFile.getStatements()) {
+                if (statement instanceof GenericConfig.Assign) {
+                    if (assigner != null) {
+                        final GenericConfig.Assign assign = (GenericConfig.Assign)statement;
+                        final String varName = assign.getName();
+
+                        // Assert that we're not stomping on another variable, which
+                        // really should be impossible at this point.
+                        assertVarType(filename, varName);
+
+                        if (mGenericConfig.getVarType(varName) == varType) {
+                            assigner.onAssignStatement(assign);
+                        }
+                    }
+                } else if (statement instanceof GenericConfig.Inherit) {
+                    if (inheriter != null) {
+                        final GenericConfig.Inherit inherit = (GenericConfig.Inherit)statement;
+                        if (seen != null) {
+                            if (seen.contains(inherit.getFilename().toString())) {
+                                continue;
+                            }
+                            seen.add(inherit.getFilename().toString());
+                        }
+                        inheriter.onInheritStatement(inherit);
+                    }
+                }
+            }
+        } finally {
+            // Also executes after return statements, so we always remove this.
+            mStack.remove(filename);
+        }
+    }
+
+    /**
+     * Call 'inheriter' for each child of 'filename' in alphabetical order.
+     */
+    private void forEachInheritAlpha(final Str filename, VarType varType, Set<String> seen,
+            InheritCallback inheriter) {
+        final TreeMap<Str, GenericConfig.Inherit> alpha = new TreeMap();
+        forEachStatement(filename, varType, null, null,
+                (inherit) -> {
+                    alpha.put(inherit.getFilename(), inherit);
+                });
+        for (final GenericConfig.Inherit inherit: alpha.values()) {
+            // Handle 'seen' here, where we actually call back, not before, so that
+            // the proper traversal order is preserved.
+            if (seen != null) {
+                if (seen.contains(inherit.getFilename().toString())) {
+                    continue;
+                }
+                seen.add(inherit.getFilename().toString());
+            }
+            inheriter.onInheritStatement(inherit);
+        }
+    }
+
+    /**
+     * Traverse the inheritance hierarchy, setting list-value product config variables.
+     */
+    private void flattenListVars(final String filename) {
+        Map<String, Value> vars = flattenListVars(new Str(filename), new HashSet());
+        // Add the result of the recursion to mVariables. We know there will be
+        // no collisions because this function only handles list variables.
+        for (Map.Entry<String, Value> entry: vars.entrySet()) {
+            mVariables.put(entry.getKey(), entry.getValue());
+        }
+    }
+
+    /**
+     * Return the variables defined, recursively, by 'filename'. The 'seen' set
+     * accumulates which nodes have been visited, as each is only done once.
+     *
+     * This convoluted algorithm isn't ideal, but it matches what is in node_fns.mk.
+     */
+    private Map<String, Value> flattenListVars(final Str filename, Set<String> seen) {
+        Map<String, Value> result = new HashMap();
+
+        // Recurse into our children first in alphabetical order, building a map of
+        // that filename to its flattened values.  The order matters here because
+        // we will only look at each child once, and when a file appears multiple
+        // times, its variables must reflect the right set of values, depending on
+        // whether it has been seen before. This preserves the order from node_fns.mk.
+
+        // Child filename --> { varname --> value }
+        final Map<Str, Map<String, Value>> children = new HashMap();
+        forEachInheritAlpha(filename, VarType.LIST, seen,
+                (inherit) -> {
+                    final Str child = inherit.getFilename();
+                    children.put(child, flattenListVars(child, seen));
+                });
+
+        // Now, traverse the values again in the original source order to concatenate the values.
+        // Note that the concatenation order is *different* from the inherit order above.
+        forEachStatement(filename, VarType.LIST, null,
+                (assign) -> {
+                    assignToListVar(result, assign.getName(), assign.getValue());
+                },
+                (inherit) -> {
+                    final Map<String, Value> child = children.get(inherit.getFilename());
+                    // child == null happens if this node has been visited before.
+                    if (child != null) {
+                        for (Map.Entry<String, Value> entry: child.entrySet()) {
+                            final String varName = entry.getKey();
+                            final Value varVal = entry.getValue();
+                            appendToListVar(result, varName, varVal.getList());
+                        }
+                    }
+                });
+
+        return result;
+    }
+
+    /**
+     * Traverse the inheritance hierarchy, setting single-value product config variables.
+     */
+    private void flattenSingleVars(final String filename) {
+        flattenSingleVars(new Str(filename), new HashSet(), new HashSet());
+    }
+
+    private void flattenSingleVars(final Str filename, Set<String> seen1, Set<String> seen2) {
+        // flattenSingleVars has two loops.  The first sets all variables that are
+        // defined for *this* file.  The second traverses through the inheritance,
+        // to fill in values that weren't defined in this file.  The first appearance of
+        // the variable is the one that wins.
+
+        forEachStatement(filename, VarType.SINGLE, seen1,
+                (assign) -> {
+                    final String varName = assign.getName();
+                    // Only take the first value that we see for single variables.
+                    if (!mVariables.containsKey(varName)) {
+                        final List<Str> valueList = assign.getValue();
+                        // There should never be more than one item in this list, because
+                        // SINGLE values should never be appended to.
+                        if (valueList.size() != 1) {
+                            final StringBuilder positions = new StringBuilder("[");
+                            for (Str s: valueList) {
+                                positions.append(s.getPosition());
+                            }
+                            positions.append(" ]");
+                            throw new RuntimeException("Value list found for SINGLE variable "
+                                    + varName + " size=" + valueList.size()
+                                    + "positions=" + positions.toString());
+                        }
+                        mVariables.put(varName,
+                                new Value(VarType.SINGLE,
+                                    valueList.get(0)));
+                    }
+                }, null);
+
+        forEachInheritAlpha(filename, VarType.SINGLE, seen2,
+                (inherit) -> {
+                    flattenSingleVars(inherit.getFilename(), seen1, seen2);
+                });
+    }
+
+    /**
+     * Traverse the inheritance hierarchy, setting variables that aren't declared as product config variables.
+     */
+    private void flattenUnknownVars(String filename) {
+        flattenUnknownVars(new Str(filename), new HashSet());
+    }
+
+    private void flattenUnknownVars(final Str filename, Set<String> seen) {
+        // flattenUnknownVars has two loops: First to attempt to set the variable from
+        // this file, and then a second loop to handle the inheritance.  This is odd
+        // but it matches the order the files are included in node_fns.mk. The last appearance
+        // of the value is the one that wins.
+
+        forEachStatement(filename, VarType.UNKNOWN, null,
+                (assign) -> {
+                    // Overwrite the current value with whatever is now in the file.
+                    mVariables.put(assign.getName(),
+                            new Value(VarType.UNKNOWN,
+                                flattenAssignList(assign, new Str(""))));
+                }, null);
+
+        forEachInheritAlpha(filename, VarType.UNKNOWN, seen,
+                (inherit) -> {
+                    flattenUnknownVars(inherit.getFilename(), seen);
+                });
+    }
+
+    String prefix = "";
+
+    /**
+     * Sets the PRODUCTS.<filename>.INHERITS_FROM variables.
+     */
+    private void flattenInheritsFrom(final String filename) {
+        flattenInheritsFrom(new Str(filename));
+    }
+
+    /**
+     * This flatten function, unlike the others, visits all of the nodes regardless
+     * of whether they have been seen before, because that's what the make code does.
+     */
+    private void flattenInheritsFrom(final Str filename) {
+        // Recurse, and gather the list of our children.
+        final TreeSet<Str> children = new TreeSet();
+        forEachStatement(filename, VarType.LIST, null, null,
+                (inherit) -> {
+                    children.add(inherit.getFilename());
+                    flattenInheritsFrom(inherit.getFilename());
+                });
+
+        final String varName = "PRODUCTS." + filename + ".INHERITS_FROM";
+        if (children.size() > 0) {
+            // Build the space separated list.
+            boolean first = true;
+            final StringBuilder val = new StringBuilder();
+            for (Str child: children) {
+                if (first) {
+                    first = false;
+                } else {
+                    val.append(' ');
+                }
+                val.append(child);
+            }
+            mVariables.put(varName, new Value(VarType.UNKNOWN, new Str(val.toString())));
+        } else {
+            // Clear whatever flattenUnknownVars happened to have put in.
+            mVariables.remove(varName);
+        }
+    }
+
+    /**
+     * Throw an exception if there's an existing variable with a different type.
+     */
+    private void assertVarType(Str filename, String varName) {
+        if (mGenericConfig.getVarType(varName) == VarType.UNKNOWN) {
+            final Value prevValue = mVariables.get(varName);
+            if (prevValue != null
+                    && prevValue.getVarType() != VarType.UNKNOWN) {
+                throw new RuntimeException("Mismatched var types:"
+                        + " filename=" + filename
+                        + " varType=" + mGenericConfig.getVarType(varName)
+                        + " varName=" + varName
+                        + " prevValue=" + Value.debugString(prevValue));
+            }
+        }
+    }
+
+    /**
+     * Depending on whether the assignment is prepending, appending, setting, etc.,
+     * update the value.  We can infer which of those operations it is by the length
+     * and contents of the values. Each value in the list was originally separated
+     * by the previous value.
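+     * <p>
+     * Illustrative example (not taken from the original sources): if the existing value
+     * is [x] and the statement was VAR := a $(VAR) b, 'items' arrives as ["a ", " b"]
+     * and the result is [a, x, b]; a plain VAR := c arrives as ["c"] and discards [x].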
+     */
+    private void assignToListVar(Map<String, Value> vars, String varName, List<Str> items) {
+        final Value value = vars.get(varName);
+        final List<Str> orig = value == null ? new ArrayList() : value.getList();
+        final List<Str> result = new ArrayList();
+        if (items.size() > 0) {
+            for (int i = 0; i < items.size(); i++) {
+                if (i != 0) {
+                    result.addAll(orig);
+                }
+                final Str item = items.get(i);
+                addWords(result, item);
+            }
+        }
+        vars.put(varName, new Value(result));
+    }
+
+    /**
+     * Appends all of the words in 'items' to an entry in vars keyed by 'varName',
+     * creating one if necessary.
+     */
+    private static void appendToListVar(Map<String, Value> vars, String varName, List<Str> items) {
+        Value value = vars.get(varName);
+        if (value == null) {
+            value = new Value(new ArrayList());
+            vars.put(varName, value);
+        }
+        final List<Str> out = value.getList();
+        for (Str item: items) {
+            addWords(out, item);
+        }
+    }
+
+    /**
+     * Split 'item' on spaces, and add each of them as a word to 'out'.
+     */
+    private static void addWords(List<Str> out, Str item) {
+        for (String word: RE_SPACE.split(item.toString().trim())) {
+            if (word.length() > 0) {
+                out.add(new Str(item.getPosition(), word));
+            }
+        }
+    }
+
+    /**
+     * Flatten the list of strings in an Assign statement, using the previous value
+     * as a separator.
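+     * <p>
+     * Illustrative example: a value list of ["a ", " b"] with previous "$(X)" flattens
+     * to "a $(X) b".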
+     */
+    private Str flattenAssignList(GenericConfig.Assign assign, Str previous) {
+        final StringBuilder result = new StringBuilder();
+        Position position = previous.getPosition();
+        final List<Str> list = assign.getValue();
+        final int size = list.size();
+        for (int i = 0; i < size; i++) {
+            final Str item = list.get(i);
+            result.append(item.toString());
+            if (i != size - 1) {
+                result.append(previous);
+            }
+            final Position pos = item.getPosition();
+            if (pos != null && pos.getFile() != null) {
+                position = pos;
+            }
+        }
+        return new Str(position, result.toString());
+    }
+
+    /**
+     * Make sure that each of the product config variables has a default value.
+     */
+    private void setDefaultKnownVars() {
+        for (Map.Entry<String, VarType> entry: mGenericConfig.getProductVars().entrySet()) {
+            final String varName = entry.getKey();
+            final VarType varType = entry.getValue();
+
+            final Value val = mVariables.get(varName);
+            if (val == null) {
+                mVariables.put(varName, new Value(varType));
+            }
+        }
+
+        // TODO: Also default these two for now, until we can rewrite the
+        // enforce-packages-exist handling.
+        if (!mVariables.containsKey("PRODUCT_ENFORCE_PACKAGES_EXIST")) {
+            mVariables.put("PRODUCT_ENFORCE_PACKAGES_EXIST", new Value(VarType.UNKNOWN));
+        }
+        if (!mVariables.containsKey("PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST")) {
+            mVariables.put("PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST", new Value(VarType.UNKNOWN));
+        }
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/GenericConfig.java b/tools/product_config/src/com/android/build/config/GenericConfig.java
new file mode 100644
index 0000000..2ee2735
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/GenericConfig.java
@@ -0,0 +1,131 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Language-agnostic representation of a configuration statement.
+ */
+public class GenericConfig extends ConfigBase {
+    /**
+     * The config files that were imported in this config pass.
+     */
+    protected final TreeMap<String, ConfigFile> mConfigFiles = new TreeMap();
+
+    /**
+     * A configuration file.
+     */
+    public static class ConfigFile {
+        /**
+         * The name of the file, relative to the tree root.
+         */
+        private final String mFilename;
+
+        /**
+         * Sections of variable definitions and import statements. Product config
+         * files will always have at least one block.
+         */
+        private final ArrayList<Statement> mStatements = new ArrayList();
+
+        public ConfigFile(String filename) {
+            mFilename = filename;
+        }
+
+        public String getFilename() {
+            return mFilename;
+        }
+
+        public void addStatement(Statement statement) {
+            mStatements.add(statement);
+        }
+
+        public ArrayList<Statement> getStatements() {
+            return mStatements;
+        }
+    }
+
+    /**
+     * Base class for statements that appear in config files.
+     */
+    public static class Statement {
+    }
+
+    /**
+     * A variable assignment.
+     */
+    public static class Assign extends Statement {
+        private final String mVarName;
+        private final List<Str> mValue;
+
+        /**
+         * Assignment of a single value
+         */
+        public Assign(String varName, Str value) {
+            mVarName = varName;
+            mValue = new ArrayList();
+            mValue.add(value);
+        }
+
+        /**
+         * Assignment referencing a previous value.
+         *   VAR := $(1) $(VAR) $(2) $(VAR) $(3)
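+         * is stored as the value list [$(1), $(2), $(3)]; the previous value of VAR is
+         * implied between consecutive items (FlattenConfig re-inserts it when flattening).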
+         */
+        public Assign(String varName, List<Str> value) {
+            mVarName = varName;
+            mValue = value;
+        }
+
+        public String getName() {
+            return mVarName;
+        }
+
+        public List<Str> getValue() {
+            return mValue;
+        }
+    }
+
+    /**
+     * An $(inherit-product FILENAME) statement
+     */
+    public static class Inherit extends Statement {
+        private final Str mFilename;
+
+        public Inherit(Str filename) {
+            mFilename = filename;
+        }
+
+        public Str getFilename() {
+            return mFilename;
+        }
+    }
+
+    /**
+     * Adds the given config file. Returns any one previously added, or null.
+     */
+    public ConfigFile addConfigFile(ConfigFile file) {
+        return mConfigFiles.put(file.getFilename(), file);
+    }
+
+    public TreeMap<String, ConfigFile> getFiles() {
+        return mConfigFiles;
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/Kati.java b/tools/product_config/src/com/android/build/config/Kati.java
new file mode 100644
index 0000000..4fa2297
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Kati.java
@@ -0,0 +1,26 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.Map;
+
+/**
+ * Wrapper for invoking kati.
+ */
+public interface Kati {
+    public Map<String, MakeConfig> loadProductConfig();
+}
diff --git a/tools/product_config/src/com/android/build/config/KatiCommand.java b/tools/product_config/src/com/android/build/config/KatiCommand.java
new file mode 100644
index 0000000..f3c71d2
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/KatiCommand.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.Arrays;
+import java.util.List;
+
+public interface KatiCommand {
+    public static class KatiException extends Exception {
+        private String mStderr;
+
+        public KatiException(List<String> cmd, String stderr) {
+            super("Error running kati: " + Arrays.toString(cmd.toArray()));
+            mStderr = stderr;
+        }
+
+        public String getStderr() {
+            return mStderr;
+        }
+    }
+
+    /**
+     * Run kati directly. Returns stdout data.
+     *
+     * @throws KatiException if there is an error. KatiException will contain
+     * the stderr from the kati invocation.
+     */
+    public String run(String[] args) throws KatiException;
+}
diff --git a/tools/product_config/src/com/android/build/config/KatiCommandImpl.java b/tools/product_config/src/com/android/build/config/KatiCommandImpl.java
new file mode 100644
index 0000000..53480d4
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/KatiCommandImpl.java
@@ -0,0 +1,139 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.nio.charset.StandardCharsets;
+
+public class KatiCommandImpl implements KatiCommand {
+    final Errors mErrors;
+    final Options mOptions;
+
+    /**
+     * Runnable that consumes all of an InputStream until EOF, writes the contents
+     * into a StringBuilder, and then closes the stream.
+     */
+    class OutputReader implements Runnable {
+        private final InputStream mStream;
+        private final StringBuilder mOutput;
+
+        OutputReader(InputStream stream, StringBuilder output) {
+            mStream = stream;
+            mOutput = output;
+        }
+
+        @Override
+        public void run() {
+            final char[] buf = new char[16*1024];
+            final InputStreamReader reader = new InputStreamReader(mStream, StandardCharsets.UTF_8);
+            try {
+                int amt;
+                while ((amt = reader.read(buf, 0, buf.length)) >= 0) {
+                    mOutput.append(buf, 0, amt);
+                }
+            } catch (IOException ex) {
+                mErrors.ERROR_KATI.add("Error reading from kati: " + ex.getMessage());
+            } finally {
+                try {
+                    reader.close();
+                } catch (IOException ex) {
+                    // Ignore errors from close(); there is nothing useful to do here.
+                }
+            }
+        }
+    }
+
+    public KatiCommandImpl(Errors errors, Options options) {
+        mErrors = errors;
+        mOptions = options;
+    }
+
+    /**
+     * Run kati directly. Returns stdout data.
+     *
+     * @throws KatiException if there is an error. KatiException will contain
+     * the stderr from the kati invocation.
+     */
+    public String run(String[] args) throws KatiException {
+        final ArrayList<String> cmd = new ArrayList<>();
+        cmd.add(mOptions.getCKatiBin());
+        for (String arg: args) {
+            cmd.add(arg);
+        }
+
+        final ProcessBuilder builder = new ProcessBuilder(cmd);
+        builder.redirectOutput(ProcessBuilder.Redirect.PIPE);
+        builder.redirectError(ProcessBuilder.Redirect.PIPE);
+
+        Process process = null;
+
+        try {
+            process = builder.start();
+        } catch (IOException ex) {
+            throw new KatiException(cmd, "IOException running process: " + ex.getMessage());
+        }
+
+        final StringBuilder stdout = new StringBuilder();
+        final Thread stdoutThread = new Thread(new OutputReader(process.getInputStream(), stdout),
+                "kati_stdout_reader");
+        stdoutThread.start();
+
+        final StringBuilder stderr = new StringBuilder();
+        final Thread stderrThread = new Thread(new OutputReader(process.getErrorStream(), stderr),
+                "kati_stderr_reader");
+        stderrThread.start();
+
+        int returnCode = waitForProcess(process);
+        joinThread(stdoutThread);
+        joinThread(stderrThread);
+
+        if (returnCode != 0) {
+            throw new KatiException(cmd, stderr.toString());
+        }
+
+        return stdout.toString();
+    }
+
+    /**
+     * Wrap Process.waitFor() because it throws InterruptedException.
+     */
+    private static int waitForProcess(Process proc) {
+        while (true) {
+            try {
+                return proc.waitFor();
+            } catch (InterruptedException ex) {
+            }
+        }
+    }
+
+    /**
+     * Wrap Thread.join() because it throws InterruptedException.
+     */
+    private static void joinThread(Thread thread) {
+        while (true) {
+            try {
+                thread.join();
+                return;
+            } catch (InterruptedException ex) {
+            }
+        }
+    }
+}
+
diff --git a/tools/product_config/src/com/android/build/config/KatiImpl.java b/tools/product_config/src/com/android/build/config/KatiImpl.java
new file mode 100644
index 0000000..de11f36
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/KatiImpl.java
@@ -0,0 +1,109 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.File;
+import java.io.FileReader;
+import java.io.IOException;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class KatiImpl implements Kati {
+    // Subdirectory inside out for config stuff.
+    private static final String CONFIG_SUBDIR = "config";
+
+    private final Errors mErrors;
+    private final Options mOptions;
+    private final KatiCommand mCommand;
+
+    // TODO: Do we need to consider the whole environment, or a greater subset of it
+    // (or a hash of it)? In theory the product-variant pair is enough, but we know
+    // people use stuff from the environment, even though we're trying to get rid of that.
+    private String getWorkDirPath() {
+        return Paths.get(mOptions.getOutDir(), CONFIG_SUBDIR,
+                mOptions.getProduct() + '-' + mOptions.getVariant()).toString();
+    }
+
+    private String getDumpConfigCsvPath() {
+        return Paths.get(getWorkDirPath(), "dumpconfig.csv").toString();
+    }
+
+    public KatiImpl(Errors errors, Options options) {
+        this(errors, options, new KatiCommandImpl(errors, options));
+    }
+
+    // VisibleForTesting
+    public KatiImpl(Errors errors, Options options, KatiCommand command) {
+        mErrors = errors;
+        mOptions = options;
+        mCommand = command;
+    }
+
+    @Override
+    public Map<String, MakeConfig> loadProductConfig() {
+        final String csvPath = getDumpConfigCsvPath();
+        try {
+            File workDir = new File(getWorkDirPath());
+
+            if (!workDir.isDirectory() && !workDir.mkdirs()) {
+                mErrors.ERROR_KATI.add("Unable to create directory: " + workDir);
+                return null; // TODO: throw exception?
+            }
+
+            String out = mCommand.run(new String[] {
+                    "-f", "build/make/core/dumpconfig.mk",
+                    "DUMPCONFIG_FILE=" + csvPath
+                });
+
+            if (!out.contains("***DONE***")) {
+                mErrors.ERROR_KATI.add(
+                        "Unknown error running kati: it did not print the ***DONE*** message");
+                return null; // TODO: throw exception?
+            }
+            // TODO: Check that output was good.
+        } catch (KatiCommand.KatiException ex) {
+            mErrors.ERROR_KATI.add("Error running kati:\n" + ex.getStderr());
+            return null;
+        }
+
+        if (!(new File(csvPath)).canRead()) {
+            mErrors.ERROR_KATI.add("Kati ran but did not create " + csvPath);
+            return null;
+        }
+
+        try (FileReader reader = new FileReader(csvPath)) {
+            Map<String, MakeConfig> makeConfigs = DumpConfigParser.parse(mErrors, csvPath, reader);
+
+            if (makeConfigs.size() == 0) {
+                // TODO: Issue error?
+                return null;
+            }
+
+            return makeConfigs;
+        } catch (CsvParser.ParseException ex) {
+            mErrors.ERROR_KATI.add(new Position(csvPath, ex.getLine()),
+                    "Unable to parse output of dumpconfig.mk: " + ex.getMessage());
+            return null; // TODO: throw exception?
+        } catch (IOException ex) {
+            System.out.println(ex);
+            mErrors.ERROR_KATI.add("Unable to read " + csvPath + ": " + ex.getMessage());
+            return null; // TODO: throw exception?
+        }
+    }
+}
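
KatiImpl's second constructor is marked VisibleForTesting and takes a KatiCommand, so
loadProductConfig() can be driven without launching a real ckati process. A minimal sketch of
such a test double follows; the class name and the canned output are hypothetical, and a real
test would also have to place a dumpconfig.csv in the work directory before the canRead() check:

    // Hypothetical test double; for illustration only, not part of this change.
    class FakeKatiCommand implements KatiCommand {
        private final String mStdout;

        FakeKatiCommand(String stdout) {
            mStdout = stdout;
        }

        @Override
        public String run(String[] args) throws KatiException {
            // Pretend kati succeeded and printed the marker that
            // KatiImpl.loadProductConfig() looks for in stdout.
            return mStdout;
        }
    }

    // Usage sketch:
    //   Kati kati = new KatiImpl(errors, options, new FakeKatiCommand("***DONE***\n"));
    //   Map<String, MakeConfig> configs = kati.loadProductConfig();
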
diff --git a/tools/product_config/src/com/android/build/config/Main.java b/tools/product_config/src/com/android/build/config/Main.java
new file mode 100644
index 0000000..5cec55e
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Main.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.TreeSet;
+
+public class Main {
+    private final Errors mErrors;
+    private final Options mOptions;
+
+    public Main(Errors errors, Options options) {
+        mErrors = errors;
+        mOptions = options;
+    }
+
+    void run() {
+        // TODO: Check the build environment to make sure we're running in a real
+        // build environment, e.g. actually inside a source tree, with TARGET_PRODUCT
+        // and TARGET_BUILD_VARIANT defined, etc.
+        Kati kati = new KatiImpl(mErrors, mOptions);
+        Map<String, MakeConfig> makeConfigs = kati.loadProductConfig();
+        if (makeConfigs == null || mErrors.hadError()) {
+            return;
+        }
+        if (false) {
+            for (MakeConfig makeConfig: (new TreeMap<String, MakeConfig>(makeConfigs)).values()) {
+                System.out.println();
+                System.out.println("=======================================");
+                System.out.println("PRODUCT CONFIG FILES : " + makeConfig.getPhase());
+                System.out.println("=======================================");
+                makeConfig.printToStream(System.out);
+            }
+        }
+
+        ConvertMakeToGenericConfig m2g = new ConvertMakeToGenericConfig(mErrors);
+        GenericConfig generic = m2g.convert(makeConfigs);
+        if (false) {
+            System.out.println("======================");
+            System.out.println("REGENERATED MAKE FILES");
+            System.out.println("======================");
+            MakeWriter.write(System.out, generic, 0);
+        }
+
+        // TODO: Lookup shortened name as used in PRODUCT_NAME / TARGET_PRODUCT
+        FlatConfig flat = FlattenConfig.flatten(mErrors, generic);
+        if (false) {
+            System.out.println("=======================");
+            System.out.println("FLATTENED VARIABLE LIST");
+            System.out.println("=======================");
+            MakeWriter.write(System.out, flat, 0);
+        }
+
+        OutputChecker checker = new OutputChecker(flat);
+        checker.reportErrors(mErrors);
+
+        // TODO: Run kati and extract the variables and convert all that into starlark files.
+
+        // TODO: Run starlark with all the generated ones and the hand written ones.
+
+        // TODO: Get the variables that were defined in starlark and use that to write
+        // out the make, soong and bazel input files.
+    }
+
+    public static void main(String[] args) {
+        Errors errors = new Errors();
+        int exitCode = 0;
+
+        try {
+            Options options = Options.parse(errors, args, System.getenv());
+            if (errors.hadError()) {
+                Options.printHelp(System.err);
+                System.err.println();
+                throw new CommandException();
+            }
+
+            switch (options.getAction()) {
+                case DEFAULT:
+                    (new Main(errors, options)).run();
+                    return;
+                case HELP:
+                    Options.printHelp(System.out);
+                    return;
+            }
+        } catch (CommandException | Errors.FatalException ex) {
+            // These are user errors, so don't show a stack trace
+            exitCode = 1;
+        } catch (Throwable ex) {
+            // These are programming errors in the code of this tool, so print the exception.
+            // We'll try to print this.  If it's something unrecoverable, then we'll hope
+            // for the best. We will still print the errors below, because they can be useful
+            // for debugging.
+            ex.printStackTrace(System.err);
+            System.err.println();
+            exitCode = 1;
+        } finally {
+            // Print errors and warnings
+            errors.printErrors(System.err);
+            if (errors.hadError()) {
+                exitCode = 1;
+            }
+            System.exit(exitCode);
+        }
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/MakeConfig.java b/tools/product_config/src/com/android/build/config/MakeConfig.java
new file mode 100644
index 0000000..dda0db9
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/MakeConfig.java
@@ -0,0 +1,170 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class MakeConfig extends ConfigBase {
+    /**
+     * The config files that were imported in this config pass.
+     */
+    protected final ArrayList<ConfigFile> mConfigFiles = new ArrayList<>();
+
+    public enum BlockType {
+        UNSET,
+        BEFORE,
+        INHERIT,
+        AFTER
+    }
+
+    public static class ConfigFile {
+        /**
+         * The name of the file, relative to the tree root.
+         */
+        private final String mFilename;
+
+        /**
+         * Sections of variable definitions and import statements. Product config
+         * files will always have at least one block.
+         */
+        private final ArrayList<Block> mBlocks = new ArrayList<>();
+
+        public ConfigFile(String filename) {
+            mFilename = filename;
+        }
+
+        public String getFilename() {
+            return mFilename;
+        }
+
+        public void addBlock(Block block) {
+            mBlocks.add(block);
+        }
+
+        public ArrayList<Block> getBlocks() {
+            return mBlocks;
+        }
+    }
+
+    /**
+     * A set of variables that were defined.
+     */
+    public static class Block {
+        private final BlockType mBlockType;
+        private final TreeMap<String, Str> mValues = new TreeMap<>();
+        private Str mInheritedFile;
+
+        public Block(BlockType blockType) {
+            mBlockType = blockType;
+        }
+
+        public BlockType getBlockType() {
+            return mBlockType;
+        }
+
+        public void addVar(String varName, Str varValue) {
+            mValues.put(varName, varValue);
+        }
+
+        public Str getVar(String varName) {
+            return mValues.get(varName);
+        }
+
+        public TreeMap<String, Str> getVars() {
+            return mValues;
+        }
+
+        public void setInheritedFile(Str filename) {
+            mInheritedFile = filename;
+        }
+
+        public Str getInheritedFile() {
+            return mInheritedFile;
+        }
+    }
+
+    /**
+     * Adds the given config file. Returns a previously added file with the same filename, or null.
+     */
+    public ConfigFile addConfigFile(ConfigFile file) {
+        ConfigFile prev = null;
+        for (ConfigFile f: mConfigFiles) {
+            if (f.getFilename().equals(file.getFilename())) {
+                prev = f;
+                break;
+            }
+        }
+        mConfigFiles.add(file);
+        return prev;
+    }
+
+    public List<ConfigFile> getConfigFiles() {
+        return mConfigFiles;
+    }
+
+    public void printToStream(PrintStream out) {
+        out.println("MakeConfig {");
+        out.println("  phase: " + mPhase);
+        out.println("  rootNodes: " + mRootNodes);
+        out.print("  singleVars: [ ");
+        for (Map.Entry<String,VarType> entry: mProductVars.entrySet()) {
+            if (entry.getValue() == VarType.SINGLE) {
+                out.print(entry.getKey());
+                out.print(" ");
+            }
+        }
+        out.println("]");
+        out.print("  listVars: [ ");
+        for (Map.Entry<String,VarType> entry: mProductVars.entrySet()) {
+            if (entry.getValue() == VarType.LIST) {
+                out.print(entry.getKey());
+                out.print(" ");
+            }
+        }
+        out.println("]");
+        out.println("  configFiles: [");
+        for (final ConfigFile configFile: mConfigFiles) {
+            out.println("    ConfigFile {");
+            out.println("      filename: " + configFile.getFilename());
+            out.println("      blocks: [");
+            for (Block block: configFile.getBlocks()) {
+                out.println("        Block {");
+                out.println("          type: " + block.getBlockType());
+                if (block.getBlockType() == BlockType.INHERIT) {
+                    out.println("          inherited: " + block.getInheritedFile());
+                }
+                out.println("          values: {");
+                for (Map.Entry<String,Str> var: block.getVars().entrySet()) {
+                    if (!var.getKey().equals("PRODUCT_PACKAGES")) {
+                        continue;
+                    }
+                    out.println("            " + var.getKey() + ": " + var.getValue());
+                }
+                out.println("          }");
+                out.println("        }");
+            }
+            out.println("      ]");
+            out.println("    }");
+        }
+        out.println("  ] // configFiles");
+        out.println("} // MakeConfig");
+    }
+}
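
The classes above are plain containers: a ConfigFile holds ordered Blocks, and each Block is
either a section of variable assignments or an inherit-product reference. A small sketch of how
a caller might populate one (the file name and values are made up):

    // Illustrative only; the file name and values are made up.
    MakeConfig.ConfigFile file = new MakeConfig.ConfigFile("device/acme/rogue/rogue.mk");

    MakeConfig.Block vars = new MakeConfig.Block(MakeConfig.BlockType.BEFORE);
    vars.addVar("PRODUCT_NAME", new Str("rogue"));
    vars.addVar("PRODUCT_PACKAGES", new Str("MyCamera MyGallery"));
    file.addBlock(vars);

    MakeConfig.Block inherit = new MakeConfig.Block(MakeConfig.BlockType.INHERIT);
    inherit.setInheritedFile(new Str("$(SRC_TARGET_DIR)/product/core_64_bit.mk"));
    file.addBlock(inherit);
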
diff --git a/tools/product_config/src/com/android/build/config/MakeWriter.java b/tools/product_config/src/com/android/build/config/MakeWriter.java
new file mode 100644
index 0000000..15fd095
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/MakeWriter.java
@@ -0,0 +1,161 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class MakeWriter {
+    public static final int FLAG_WRITE_HEADER = 1;
+    public static final int FLAG_WRITE_ANNOTATIONS = 1 << 1;
+
+    private final boolean mWriteHeader;
+    private final boolean mWriteAnnotations;
+
+    public static void write(PrintStream out, GenericConfig config, int flags) {
+        (new MakeWriter(flags)).writeGeneric(out, config);
+    }
+
+    public static void write(PrintStream out, FlatConfig config, int flags) {
+        (new MakeWriter(flags)).writeFlat(out, config);
+    }
+
+
+    private MakeWriter(int flags) {
+        mWriteHeader = (flags & FLAG_WRITE_HEADER) != 0;
+        mWriteAnnotations = (flags & FLAG_WRITE_ANNOTATIONS) != 0;
+    }
+
+    private void writeGeneric(PrintStream out, GenericConfig config) {
+        for (GenericConfig.ConfigFile file: config.getFiles().values()) {
+            out.println("---------------------------------------------------------");
+            out.println("FILE: " + file.getFilename());
+            out.println("---------------------------------------------------------");
+            writeFile(out, config, file);
+            out.println();
+        }
+        out.println("---------------------------------------------------------");
+        out.println("VARIABLES TOUCHED BY MAKE BASED CONFIG:");
+        out.println("---------------------------------------------------------");
+        writeStrVars(out, OutputChecker.getModifiedVars(config.getInitialVariables(),
+                                          config.getFinalVariables()), config);
+    }
+
+    private void writeFile(PrintStream out, GenericConfig config, GenericConfig.ConfigFile file) {
+        if (mWriteHeader) {
+            out.println("# This file is generated by the product_config tool");
+        }
+        for (GenericConfig.Statement statement: file.getStatements()) {
+            if (statement instanceof GenericConfig.Assign) {
+                writeAssign(out, config, (GenericConfig.Assign)statement);
+            } else if (statement instanceof GenericConfig.Inherit) {
+                writeInherit(out, (GenericConfig.Inherit)statement);
+            } else {
+                throw new RuntimeException("Unexpected Statement: " + statement);
+            }
+        }
+    }
+
+    private void writeAssign(PrintStream out, GenericConfig config,
+            GenericConfig.Assign statement) {
+        final List<Str> values = statement.getValue();
+        final int size = values.size();
+        final String varName = statement.getName();
+        Position pos = null;
+        if (size == 0) {
+            return;
+        } else if (size == 1) {
+            // Plain :=
+            final Str value = values.get(0);
+            out.print(varName + " := " + value);
+            pos = value.getPosition();
+        } else if (size == 2 && values.get(0).toString().length() == 0) {
+            // Plain +=
+            final Str value = values.get(1);
+            out.print(varName + " += " + value);
+            pos = value.getPosition();
+        } else {
+            // Write it out the long way
+            out.print(varName + " := " + values.get(0));
+            for (int i = 1; i < size; i++) {
+                out.print("$(" + varName + ") " + values.get(i));
+                pos = values.get(i).getPosition();
+            }
+        }
+        if (mWriteAnnotations) {
+            out.print("  # " + config.getVarType(varName) + " " + pos);
+        }
+        out.println();
+    }
+
+    private void writeInherit(PrintStream out, GenericConfig.Inherit statement) {
+        final Str filename = statement.getFilename();
+        out.print("$(call inherit-product, " + filename + ")");
+        if (mWriteAnnotations) {
+            out.print("  # " + filename.getPosition());
+        }
+        out.println();
+    }
+
+    private static class Var {
+        Var(String name, Str val) {
+            this.name = name;
+            this.val = val;
+        }
+        final String name;
+        final Str val;
+    }
+
+    private static void writeStrVars(PrintStream out, Map<String, Str> vars, ConfigBase config) {
+        // Sort by file name and var name
+        TreeMap<String, Var> sorted = new TreeMap<>();
+        for (Map.Entry<String, Str> entry: vars.entrySet()) {
+            sorted.put(entry.getValue().getPosition().toString() + " " + entry.getKey(),
+                    new Var(entry.getKey(), entry.getValue()));
+        }
+        // Print it
+        for (Var var: sorted.values()) {
+            out.println(var.val.getPosition() + var.name + " := " + var.val);
+        }
+    }
+
+    private void writeFlat(PrintStream out, FlatConfig config) {
+        // TODO: Print positions.
+        for (Map.Entry<String, Value> entry: config.getValues().entrySet()) {
+            out.print(entry.getKey());
+            out.print(" := ");
+
+            final Value value = entry.getValue();
+            if (value.getVarType() == VarType.LIST) {
+                final List<Str> list = value.getList();
+                final int size = list.size();
+                for (int i = 0; i < size; i++) {
+                    out.print(list.get(i).toString());
+                    if (i != size - 1) {
+                        out.print(" \\\n        ");
+                    }
+                }
+            } else {
+                out.print(value.getStr().toString());
+            }
+            out.println();
+        }
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/Options.java b/tools/product_config/src/com/android/build/config/Options.java
new file mode 100644
index 0000000..ed544dc
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Options.java
@@ -0,0 +1,237 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.PrintStream;
+import java.util.Map;
+import java.util.TreeMap;
+
+public class Options {
+    public enum Action {
+        DEFAULT,
+        HELP
+    }
+
+    private Action mAction = Action.DEFAULT;
+
+    private String mProduct;
+    private String mVariant;
+    private String mOutDir;
+    private String mCKatiBin;
+
+    public Action getAction() {
+        return mAction;
+    }
+
+    public String getProduct() {
+        return mProduct;
+    }
+
+    public String getVariant() {
+        return mVariant;
+    }
+
+    public String getOutDir() {
+        return mOutDir != null ? mOutDir : "out";
+    }
+
+    public String getCKatiBin() {
+        return mCKatiBin;
+    }
+
+    public static void printHelp(PrintStream out) {
+        out.println("usage: product_config");
+        out.println();
+        out.println("REQUIRED FLAGS");
+        out.println("  --ckati_bin CKATI        Kati binary to use.");
+        out.println();
+        out.println("OPTIONAL FLAGS");
+        out.println("  --hide ERROR_ID          Suppress this error.");
+        out.println("  --error ERROR_ID         Make this ERROR_ID a fatal error.");
+        out.println("  --help -h                This message.");
+        out.println("  --warning ERROR_ID       Make this ERROR_ID a warning.");
+        out.println();
+        out.println("REQUIRED ENVIRONMENT");
+        out.println("  TARGET_PRODUCT           Product to build from lunch command.");
+        out.println("  TARGET_BUILD_VARIANT     Build variant from lunch command.");
+        out.println();
+        out.println("OPTIONAL ENVIRONMENT");
+        out.println("  OUT_DIR                  Build output directory. Defaults to \"out\".");
+        out.println();
+        out.println("ERRORS");
+        out.println("  The following are the errors that can be controlled on the");
+        out.println("  command line with the --hide, --warning and --error flags.");
+
+        TreeMap<Integer, Errors.Category> sorted = new TreeMap<>((new Errors()).getCategories());
+
+        for (final Errors.Category category: sorted.values()) {
+            if (category.isLevelSettable()) {
+                out.println(String.format("    %-3d      %s", category.getCode(),
+                        category.getHelp().replace("\n", "\n             ")));
+            }
+        }
+    }
+
+    static class Parser {
+        private static class ParseException extends Exception {
+            public ParseException(String message) {
+                super(message);
+            }
+        }
+
+        private Errors mErrors;
+        private String[] mArgs;
+        private Map<String,String> mEnv;
+        private Options mResult = new Options();
+        private int mIndex;
+        private boolean mSkipRequiredArgValidation;
+
+        public Parser(Errors errors, String[] args, Map<String,String> env) {
+            mErrors = errors;
+            mArgs = args;
+            mEnv = env;
+        }
+
+        public Options parse() {
+            // Args
+            try {
+                while (mIndex < mArgs.length) {
+                    final String arg = mArgs[mIndex];
+
+                    if ("--ckati_bin".equals(arg)) {
+                        mResult.mCKatiBin = requireNextStringArg(arg);
+                    } else if ("--hide".equals(arg)) {
+                        handleErrorCode(arg, Errors.Level.HIDDEN);
+                    } else if ("--error".equals(arg)) {
+                        handleErrorCode(arg, Errors.Level.ERROR);
+                    } else if ("--help".equals(arg) || "-h".equals(arg)) {
+                        // --help overrides the other actions unless an error has already
+                        // occurred; either way, stop parsing here.
+                        if (!mErrors.hadError()) {
+                            mResult.mAction = Action.HELP;
+                        }
+                        return mResult;
+                    } else if ("--warning".equals(arg)) {
+                        handleErrorCode(arg, Errors.Level.WARNING);
+                    } else {
+                        throw new ParseException("Unknown command line argument: " + arg);
+                    }
+
+                    mIndex++;
+                }
+            } catch (ParseException ex) {
+                mErrors.ERROR_COMMAND_LINE.add(ex.getMessage());
+            }
+
+            // Environment
+            mResult.mProduct = mEnv.get("TARGET_PRODUCT");
+            mResult.mVariant = mEnv.get("TARGET_BUILD_VARIANT");
+            mResult.mOutDir = mEnv.get("OUT_DIR");
+
+            validateArgs();
+
+            return mResult;
+        }
+
+        /**
+         * For testing; don't generate errors about missing arguments
+         */
+        public void setSkipRequiredArgValidation() {
+            mSkipRequiredArgValidation = true;
+        }
+
+        private void validateArgs() {
+            if (!mSkipRequiredArgValidation) {
+                if (mResult.mCKatiBin == null || "".equals(mResult.mCKatiBin)) {
+                    addMissingArgError("--ckati_bin");
+                }
+                if (mResult.mProduct == null) {
+                    addMissingEnvError("TARGET_PRODUCT");
+                }
+                if (mResult.mVariant == null) {
+                    addMissingEnvError("TARGET_BUILD_VARIANT");
+                }
+            }
+        }
+
+        private void addMissingArgError(String argName) {
+            mErrors.ERROR_COMMAND_LINE.add("Required command line argument missing: "
+                    + argName);
+        }
+
+        private void addMissingEnvError(String envName) {
+            mErrors.ERROR_COMMAND_LINE.add("Required environment variable missing: "
+                    + envName);
+        }
+
+        private String getNextNonFlagArg() {
+            if (mIndex == mArgs.length - 1) {
+                return null;
+            }
+            if (mArgs[mIndex + 1].startsWith("-")) {
+                return null;
+            }
+            mIndex++;
+            return mArgs[mIndex];
+        }
+
+        private String requireNextStringArg(String arg) throws ParseException {
+            final String val = getNextNonFlagArg();
+            if (val == null) {
+                throw new ParseException(arg + " requires a string argument.");
+            }
+            return val;
+        }
+
+        private int requireNextNumberArg(String arg) throws ParseException {
+            final String val = getNextNonFlagArg();
+            if (val == null) {
+                throw new ParseException(arg + " requires a numeric argument.");
+            }
+            try {
+                return Integer.parseInt(val);
+            } catch (NumberFormatException ex) {
+                throw new ParseException(arg + " requires a numeric argument. found: " + val);
+            }
+        }
+
+        private void handleErrorCode(String arg, Errors.Level level) throws ParseException {
+            final int code = requireNextNumberArg(arg);
+            final Errors.Category category = mErrors.getCategories().get(code);
+            if (category == null) {
+                mErrors.WARNING_UNKNOWN_COMMAND_LINE_ERROR.add("Unknown error code: " + code);
+                return;
+            }
+            if (!category.isLevelSettable()) {
+                mErrors.ERROR_COMMAND_LINE.add("Can't set level for error " + code);
+                return;
+            }
+            category.setLevel(level);
+        }
+    }
+
+    /**
+     * Parse the arguments and return an options object.
+     * <p>
+     * Sets the hidden / warning / error levels requested on the command line on the Errors object.
+     * <p>
+     * Adds any errors encountered during parsing to the Errors object.
+     */
+    public static Options parse(Errors errors, String[] args, Map<String, String> env) {
+        return (new Parser(errors, args, env)).parse();
+    }
+}
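
A short sketch of how the parser is driven: the flags come from the command line, while the
product, variant, and output directory come from the environment. The paths and product names
below are made up, and the fragment assumes the usual java.util imports:

    // Illustrative only; paths and product names are made up.
    Errors errors = new Errors();

    Map<String, String> env = new HashMap<>();
    env.put("TARGET_PRODUCT", "aosp_arm64");
    env.put("TARGET_BUILD_VARIANT", "userdebug");
    env.put("OUT_DIR", "out");

    Options options = Options.parse(errors,
            new String[] { "--ckati_bin", "prebuilts/build-tools/linux-x86/bin/ckati" },
            env);

    if (errors.hadError()) {
        Options.printHelp(System.err);
    } else {
        // options.getProduct()  -> "aosp_arm64"
        // options.getVariant()  -> "userdebug"
        // options.getOutDir()   -> "out" (also the default when OUT_DIR is unset)
        // options.getCKatiBin() -> the --ckati_bin value
    }
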
diff --git a/tools/product_config/src/com/android/build/config/OutputChecker.java b/tools/product_config/src/com/android/build/config/OutputChecker.java
new file mode 100644
index 0000000..d982dba
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/OutputChecker.java
@@ -0,0 +1,190 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+
+/**
+ * Compares the make-based configuration as reported by dumpconfig.mk
+ * with what was computed by the new tool.
+ */
+public class OutputChecker {
+    // Differences that we know about: either known issues to be fixed or intentional differences.
+    private static final RegexSet IGNORED_VARIABLES = new RegexSet(
+            // TODO: Rewrite the enforce packages exist logic into this tool.
+            "PRODUCT_ENFORCE_PACKAGES_EXIST",
+            "PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST",
+            "PRODUCTS\\..*\\.PRODUCT_ENFORCE_PACKAGES_EXIST",
+            "PRODUCTS\\..*\\.PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST",
+
+            // This is generated by this tool, but comes later in the make build system.
+            "INTERNAL_PRODUCT",
+
+            // This can be set temporarily by product_config.mk
+            ".KATI_ALLOW_RULES"
+            );
+
+    private final FlatConfig mConfig;
+    private final TreeMap<String, Variable> mVariables;
+
+    /**
+     * Represents the before and after state of a variable.
+     */
+    public static class Variable {
+        public final String name;
+        public final VarType type;
+        public final Str original;
+        public final Value updated;
+
+        public Variable(String name, VarType type, Str original) {
+            this(name, type, original, null);
+        }
+
+        public Variable(String name, VarType type, Str original, Value updated) {
+            this.name = name;
+            this.type = type;
+            this.original = original;
+            this.updated = updated;
+        }
+
+        /**
+         * Return a copy of this Variable with the updated field also set.
+         */
+        public Variable addUpdated(Value updated) {
+            return new Variable(name, type, original, updated);
+        }
+
+        /**
+         * Return whether normalizedOriginal and normalizedUpdated are equal.
+         */
+        public boolean isSame() {
+            final Str normalizedOriginal = Value.normalize(original);
+            final Str normalizedUpdated = Value.normalize(updated);
+            if (normalizedOriginal == null && normalizedUpdated == null) {
+                return true;
+            } else if (normalizedOriginal != null) {
+                return normalizedOriginal.equals(normalizedUpdated);
+            } else {
+                return false;
+            }
+        }
+    }
+
+    /**
+     * Construct OutputChecker with the config it will check.
+     */
+    public OutputChecker(FlatConfig config) {
+        mConfig = config;
+        mVariables = getVariables(config);
+    }
+
+    /**
+     * Add a WARNING_DIFFERENT_FROM_KATI for each of the variables which have changed.
+     */
+    public void reportErrors(Errors errors) {
+        for (Variable var: getDifferences()) {
+            if (IGNORED_VARIABLES.matches(var.name)) {
+                continue;
+            }
+            errors.WARNING_DIFFERENT_FROM_KATI.add("product_config processing differs from"
+                    + " kati processing for " + var.type + " variable " + var.name + ".\n"
+                    + "original: "
+                    + Value.oneLinePerWord(var.original, "<null>") + "\n"
+                    + "updated: "
+                    + Value.oneLinePerWord(var.updated, "<null>"));
+        }
+    }
+
+    /**
+     * Get the Variables that are different between the normalized form of the original
+     * and updated values. If one is null and the other is not, the values are considered
+     * different, even if the non-null one is an empty string.
+     */
+    public List<Variable> getDifferences() {
+        final ArrayList<Variable> result = new ArrayList<>();
+        for (Variable var: mVariables.values()) {
+            if (!var.isSame()) {
+                result.add(var);
+            }
+        }
+        return result;
+    }
+
+    /**
+     * Get all of the variables for this config.
+     *
+     * VisibleForTesting
+     */
+    static TreeMap<String, Variable> getVariables(FlatConfig config) {
+        final TreeMap<String, Variable> result = new TreeMap<>();
+
+        // Add the original values to the result map.
+        for (Map.Entry<String, Str> entry: getModifiedVars(config.getInitialVariables(),
+                    config.getFinalVariables()).entrySet()) {
+            final String name = entry.getKey();
+            result.put(name, new Variable(name, config.getVarType(name), entry.getValue()));
+        }
+
+        // Add the updated values to the result map.
+        for (Map.Entry<String, Value> entry: config.getValues().entrySet()) {
+            final String name = entry.getKey();
+            final Value value = entry.getValue();
+            Variable var = result.get(name);
+            if (var == null) {
+                result.put(name, new Variable(name, config.getVarType(name), null, value));
+            } else {
+                result.put(name, var.addUpdated(value));
+            }
+        }
+
+        return result;
+    }
+
+    /**
+     * Get the entries that are different in the two maps.
+     */
+    public static Map<String, Str> getModifiedVars(Map<String, Str> before,
+            Map<String, Str> after) {
+        final HashMap<String, Str> result = new HashMap<>();
+
+        // Entries that were added or changed.
+        for (Map.Entry<String, Str> afterEntry: after.entrySet()) {
+            final String varName = afterEntry.getKey();
+            final Str afterValue = afterEntry.getValue();
+            final Str beforeValue = before.get(varName);
+            if (beforeValue == null || !beforeValue.equals(afterValue)) {
+                result.put(varName, afterValue);
+            }
+        }
+
+        // Entries that were removed are treated as the empty string.
+        for (Map.Entry<String, Str> beforeEntry: before.entrySet()) {
+            final String varName = beforeEntry.getKey();
+            if (!after.containsKey(varName)) {
+                result.put(varName, new Str(""));
+            }
+        }
+
+        return result;
+    }
+}
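
getModifiedVars() reports only the variables that were added, changed, or removed between the
two snapshots, and removed entries come back as an empty Str. A small sketch of the expected
result (the variable values are made up, and the fragment assumes the java.util imports):

    // Illustrative only; the values are made up.
    Map<String, Str> before = new HashMap<>();
    before.put("PRODUCT_NAME", new Str("rogue"));
    before.put("PRODUCT_MODEL", new Str("Rogue One"));

    Map<String, Str> after = new HashMap<>();
    after.put("PRODUCT_NAME", new Str("rogue"));         // unchanged: not reported
    after.put("PRODUCT_PACKAGES", new Str("MyCamera"));  // added: reported

    Map<String, Str> modified = OutputChecker.getModifiedVars(before, after);
    // modified contains:
    //   "PRODUCT_PACKAGES" -> "MyCamera"
    //   "PRODUCT_MODEL"    -> ""   (removed entries are treated as the empty string)
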
diff --git a/tools/product_config/src/com/android/build/config/Position.java b/tools/product_config/src/com/android/build/config/Position.java
new file mode 100644
index 0000000..266021d
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Position.java
@@ -0,0 +1,111 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.regex.Matcher;
+import java.util.regex.Pattern;
+
+/**
+ * Position in a source file.
+ */
+public class Position implements Comparable<Position> {
+    /**
+     * Sentinel line number for when there is no known line number.
+     */
+    public static final int NO_LINE = -1;
+
+    private static final Pattern REGEX = Pattern.compile("([^:]*)(?::(\\d+))?:?\\s*");
+    public static final String UNKNOWN = "<unknown>";
+
+    private final String mFile;
+    private final int mLine;
+
+    public Position() {
+        mFile = null;
+        mLine = NO_LINE;
+    }
+
+    public Position(String file) {
+        mFile = file;
+        mLine = NO_LINE;
+    }
+
+    public Position(String file, int line) {
+        if (line < NO_LINE) {
+            throw new IllegalArgumentException("Negative line number. file=" + file
+                    + " line=" + line);
+        }
+        mFile = file;
+        mLine = line;
+    }
+
+    public int compareTo(Position that) {
+        int result = mFile.compareTo(that.mFile);
+        if (result != 0) {
+            return result;
+        }
+        return mLine - that.mLine;
+    }
+
+    public String getFile() {
+        return mFile;
+    }
+
+    public int getLine() {
+        return mLine;
+    }
+
+    /**
+     * Return a Position object from a string containing {@code <filename>:<line>}, or the
+     * default {@code Position(null, NO_LINE)} if the string can't be parsed.
+     */
+    public static Position parse(String str) {
+        final Matcher m = REGEX.matcher(str);
+        if (!m.matches()) {
+            return new Position();
+        }
+        String filename = m.group(1);
+        if (filename.length() == 0 || UNKNOWN.equals(filename)) {
+            filename = null;
+        }
+        String lineString = m.group(2);
+        int line;
+        if (lineString == null || lineString.length() == 0) {
+            line = NO_LINE;
+        } else {
+            try {
+                line = Integer.parseInt(lineString);
+            } catch (NumberFormatException ex) {
+                line = NO_LINE;
+            }
+        }
+        return new Position(filename, line);
+    }
+
+    @Override
+    public String toString() {
+        if (mFile == null && mLine == NO_LINE) {
+            return "";
+        } else if (mFile == null && mLine != NO_LINE) {
+            return UNKNOWN + ":" + mLine + ": ";
+        } else if (mFile != null && mLine == NO_LINE) {
+            return mFile + ": ";
+        } else { // if (mFile != null && mLine != NO_LINE)
+            return mFile + ':' + mLine + ": ";
+        }
+    }
+}
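
Position.parse() accepts the "file:line: " prefixes that make-style error messages use, and
toString() reproduces that prefix. A quick sketch (the path is made up):

    // Illustrative only; the path is made up.
    Position pos = Position.parse("device/acme/rogue/BoardConfig.mk:7: ");
    // pos.getFile()  -> "device/acme/rogue/BoardConfig.mk"
    // pos.getLine()  -> 7
    // pos.toString() -> "device/acme/rogue/BoardConfig.mk:7: "

    Position unknown = Position.parse("<unknown>: ");
    // unknown.getFile() -> null, unknown.getLine() -> Position.NO_LINE
    // unknown.toString() -> ""
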
diff --git a/tools/product_config/src/com/android/build/config/RegexSet.java b/tools/product_config/src/com/android/build/config/RegexSet.java
new file mode 100644
index 0000000..70fcd29
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/RegexSet.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright (C) 2021 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.regex.Pattern;
+
+/**
+ * Returns whether a string matches one of a set of presupplied regexes.
+ */
+public class RegexSet {
+    private final Pattern[] mPatterns;
+
+    public RegexSet(String... patterns) {
+        mPatterns = new Pattern[patterns.length];
+        for (int i = 0; i < patterns.length; i++) {
+            mPatterns[i] = Pattern.compile(patterns[i]);
+        }
+    }
+
+    public boolean matches(String s) {
+        for (Pattern p: mPatterns) {
+            if (p.matcher(s).matches()) {
+                return true;
+            }
+        }
+        return false;
+    }
+}
+
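
A tiny usage sketch, reusing two of the patterns from OutputChecker's IGNORED_VARIABLES set
(the middle test string is made up):

    RegexSet ignored = new RegexSet(
            "PRODUCT_ENFORCE_PACKAGES_EXIST",
            "PRODUCTS\\..*\\.PRODUCT_ENFORCE_PACKAGES_EXIST");

    ignored.matches("PRODUCT_ENFORCE_PACKAGES_EXIST");   // true: exact pattern
    ignored.matches("PRODUCTS.device/acme/rogue/rogue.mk.PRODUCT_ENFORCE_PACKAGES_EXIST");
                                                         // true: wildcard pattern
    ignored.matches("PRODUCT_PACKAGES");                 // false
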
diff --git a/tools/product_config/src/com/android/build/config/Str.java b/tools/product_config/src/com/android/build/config/Str.java
new file mode 100644
index 0000000..2516b76
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Str.java
@@ -0,0 +1,82 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A String and the Position where it came from in source code.
+ */
+public class Str implements Comparable<Str> {
+    private String mValue;
+    private Position mPosition;
+
+    public Str(String s) {
+        mValue = s;
+        mPosition = new Position();
+    }
+
+    public Str(Position pos, String s) {
+        mValue = s;
+        mPosition = pos;
+    }
+
+    public int length() {
+        return mValue.length();
+    }
+
+    @Override
+    public String toString() {
+        return mValue;
+    }
+
+    public Position getPosition() {
+        return mPosition;
+    }
+
+    /**
+     * Two Str objects are equal if their string values are equal, regardless of whether
+     * their positions are the same.
+     */
+    @Override
+    public boolean equals(Object o) {
+        if (!(o instanceof Str)) {
+            return false;
+        }
+        final Str that = (Str)o;
+        return mValue.equals(that.mValue);
+    }
+
+    @Override
+    public int hashCode() {
+        return mValue.hashCode();
+    }
+
+    @Override
+    public int compareTo(Str that) {
+        return this.mValue.compareTo(that.mValue);
+    }
+
+    public static ArrayList<Str> toList(Position pos, List<String> list) {
+        final ArrayList<Str> result = new ArrayList<>(list.size());
+        for (String s: list) {
+            result.add(new Str(pos, s));
+        }
+        return result;
+    }
+}
diff --git a/tools/product_config/src/com/android/build/config/Value.java b/tools/product_config/src/com/android/build/config/Value.java
new file mode 100644
index 0000000..9bd6401
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/Value.java
@@ -0,0 +1,218 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.regex.Pattern;
+import java.util.regex.Matcher;
+
+/**
+ * Class to hold the two types of variables we support, strings and lists of strings.
+ */
+public class Value {
+    private static final Pattern SPACES = Pattern.compile("\\s+");
+
+    private final VarType mVarType;
+    private final Str mStr;
+    private final ArrayList<Str> mList;
+
+    /**
+     * Construct an appropriately typed empty value.
+     */
+    public Value(VarType varType) {
+        mVarType = varType;
+        if (varType == VarType.LIST) {
+            mStr = null;
+            mList = new ArrayList<>();
+            mList.add(new Str(""));
+        } else {
+            mStr = new Str("");
+            mList = null;
+        }
+    }
+
+    public Value(VarType varType, Str str) {
+        mVarType = varType;
+        mStr = str;
+        mList = null;
+    }
+
+    public Value(List<Str> list) {
+        mVarType = VarType.LIST;
+        mStr = null;
+        mList = new ArrayList<>(list);
+    }
+
+    public VarType getVarType() {
+        return mVarType;
+    }
+
+    public Str getStr() {
+        return mStr;
+    }
+
+    public List<Str> getList() {
+        return mList;
+    }
+
+    /**
+     * Normalize a string that is behaving as a list.
+     */
+    public static String normalize(String str) {
+        if (str == null) {
+            return null;
+        }
+        return SPACES.matcher(str.trim()).replaceAll(" ").trim();
+    }
+
+    /**
+     * Normalize a string that is behaving as a list.
+     */
+    public static Str normalize(Str str) {
+        if (str == null) {
+            return null;
+        }
+        return new Str(str.getPosition(), normalize(str.toString()));
+    }
+
+    /**
+     * Normalize this Value into the same format as normalize(Str).
+     */
+    public static Str normalize(Value val) {
+        if (val == null) {
+            return null;
+        }
+        if (val.mStr != null) {
+            return normalize(val.mStr);
+        }
+
+        if (val.mList.size() == 0) {
+            return new Str("");
+        }
+
+        StringBuilder result = new StringBuilder();
+        final int size = val.mList.size();
+        boolean first = true;
+        for (int i = 0; i < size; i++) {
+            String s = val.mList.get(i).toString().trim();
+            if (s.length() > 0) {
+                if (!first) {
+                    result.append(" ");
+                } else {
+                    first = false;
+                }
+                result.append(s);
+            }
+        }
+
+        // Just use the first item's position.
+        return new Str(val.mList.get(0).getPosition(), result.toString());
+    }
+
+    /**
+     * Put each word in 'val' on its own line in make format. If 'val' is null,
+     * 'nullValue' is returned.
+     */
+    public static String oneLinePerWord(Value val, String nullValue) {
+        if (val == null) {
+            return nullValue;
+        }
+        final String s = normalize(val).toString();
+        final Matcher m = SPACES.matcher(s);
+        final StringBuilder result = new StringBuilder();
+        if (s.length() > 0 && (val.mVarType == VarType.LIST || m.find())) {
+            result.append("\\\n  ");
+        }
+        result.append(m.replaceAll(" \\\\\n  "));
+        return result.toString();
+    }
+
+    /**
+     * Put each word in 'str' on its own line in make format. If 'str' is null,
+     * nullValue is returned.
+     */
+    public static String oneLinePerWord(Str str, String nullValue) {
+        if (str == null) {
+            return nullValue;
+        }
+        final Matcher m = SPACES.matcher(normalize(str.toString()));
+        final StringBuilder result = new StringBuilder();
+        if (m.find()) {
+            result.append("\\\n  ");
+        }
+        result.append(m.replaceAll(" \\\\\n  "));
+        return result.toString();
+    }
+
+    /**
+     * Return a string representing this value with detailed debugging information.
+     */
+    public static String debugString(Value val) {
+        if (val == null) {
+            return "null";
+        }
+
+        final StringBuilder str = new StringBuilder("Value(");
+        if (val.mStr != null) {
+            str.append("mStr=");
+            str.append("\"");
+            str.append(val.mStr.toString());
+            str.append("\"");
+            if (false) {
+                str.append(" (");
+                str.append(val.mStr.getPosition().toString());
+                str.append(")");
+            }
+        }
+        if (val.mList != null) {
+            str.append("mList=");
+            str.append("[");
+            for (Str s: val.mList) {
+                str.append(" \"");
+                str.append(s.toString());
+                if (false) {
+                    str.append("\" (");
+                    str.append(s.getPosition().toString());
+                    str.append(")");
+                } else {
+                    str.append("\"");
+                }
+            }
+            str.append(" ]");
+        }
+        str.append(")");
+        return str.toString();
+    }
+
+    /**
+     * Get the Positions of all of the parts of this Value.
+     */
+    public List<Position> getPositions() {
+        List<Position> result = new ArrayList<>();
+        if (mStr != null) {
+            result.add(mStr.getPosition());
+        }
+        if (mList != null) {
+            for (Str str: mList) {
+                result.add(str.getPosition());
+            }
+        }
+        return result;
+    }
+}
+
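
normalize() collapses runs of whitespace so string and list values can be compared reliably,
and oneLinePerWord() re-emits a value with make-style continuation lines. A short sketch of
both (the inputs are made up):

    // Illustrative only; the inputs are made up.
    Value.normalize("  libfoo   libbar  ");         // -> "libfoo libbar"

    Str words = new Str("libfoo libbar");
    String formatted = Value.oneLinePerWord(words, "<null>");
    // formatted == "\\\n  libfoo \\\n  libbar", i.e. a trailing backslash before each
    // line break, with each word on its own line indented by two spaces.

    Value.oneLinePerWord((Value) null, "<null>");   // -> "<null>"
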
diff --git a/tools/product_config/src/com/android/build/config/VarType.java b/tools/product_config/src/com/android/build/config/VarType.java
new file mode 100644
index 0000000..43e9366
--- /dev/null
+++ b/tools/product_config/src/com/android/build/config/VarType.java
@@ -0,0 +1,41 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+/**
+ * Whether a product config variable is a list or single-value variable.
+ */
+public enum VarType {
+    /**
+     * A product config variable that is a list of space separated strings.
+     * These are defined by _product_list_vars in product.mk.
+     */
+    LIST,
+
+    /**
+     * A product config variable that is a single string.
+     * These are defined by _product_single_value_vars in product.mk.
+     */
+    SINGLE,
+
+    /**
+     * A variable that is not given the special product config handling but is
+     * nonetheless defined by product config makefiles.
+     */
+    UNKNOWN
+}
+
diff --git a/tools/product_config/test.sh b/tools/product_config/test.sh
new file mode 100755
index 0000000..ee9ed5c
--- /dev/null
+++ b/tools/product_config/test.sh
@@ -0,0 +1,120 @@
+#!/bin/bash
+
+#
+# This script runs the full set of tests for product config:
+# 1. Build the product-config tool.
+# 2. Run the unit tests.
+# 3. Run the product config for every product available in the current
+#    source tree, for each of user, userdebug and eng.
+#       - To restrict which products or variants are run, set the
+#         PRODUCTS or VARIANTS environment variables.
+#       - Products for which the make based product config fails are
+#         skipped.
+#
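+#
+# Example invocation from the top of the source tree (product and variant
+# names are illustrative):
+#   PRODUCTS="aosp_arm64" VARIANTS="userdebug" build/make/tools/product_config/test.sh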
+
+# The PRODUCTS variable is used by the build, and setting it in the environment
+# interferes with that, so unset it.  (That should probably be fixed)
+products=$PRODUCTS
+variants=$VARIANTS
+unset PRODUCTS
+unset VARIANTS
+
+# Don't use lunch from the user's shell
+unset TARGET_PRODUCT
+unset TARGET_BUILD_VARIANT
+
+function die() {
+    format=$1
+    shift
+    printf "$format\nStopping...\n" "$@" >&2
+    exit 1;
+}
+
+[[ -f build/make/envsetup.sh ]] || die "Run this script from the root of the tree."
+: ${products:=$(build/soong/soong_ui.bash --dumpvar-mode all_named_products | sed -e "s/ /\n/g" | sort -u )}
+: ${variants:="user userdebug eng"}
+: ${CKATI_BIN:=prebuilts/build-tools/$(build/soong/soong_ui.bash --dumpvar-mode HOST_PREBUILT_TAG)/bin/ckati}
+
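+# Stop the whole run if the previous command was killed by a signal
+# (exit status 128+N), e.g. on Ctrl-C.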
+function if_signal_exit() {
+    [[ $1 -lt 128 ]] || exit $1
+}
+
+build/soong/soong_ui.bash --build-mode --all-modules --dir="$(pwd)" product-config-test product-config \
+    || die "Build failed."
+
+echo
+echo Running unit tests
+java -jar out/host/linux-x86/testcases/product-config-test/product-config-test.jar
+unit_tests=$?
+if_signal_exit $unit_tests
+
+failed_baseline_checks=
+failed_rule_checks=
+for product in $products ; do
+    for variant in $variants ; do
+        echo
+        echo "Checking: lunch $product-$variant"
+
+        TARGET_PRODUCT=$product \
+            TARGET_BUILD_VARIANT=$variant \
+            build/soong/soong_ui.bash --dumpvar-mode TARGET_PRODUCT &> /dev/null
+        exit_status=$?
+        if_signal_exit $exit_status
+        if [ $exit_status -ne 0 ] ; then
+            echo "*** Combo fails with make, skipping product-config test run for $product-$variant"
+        else
+            rm -rf out/config/$product-$variant
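+            # Run the product-config tool for this combo; a non-zero exit is
+            # recorded below as a baseline check failure.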
+            TARGET_PRODUCT=$product TARGET_BUILD_VARIANT=$variant product-config \
+                            --ckati_bin $CKATI_BIN \
+                            --error 1000
+            exit_status=$?
+            if_signal_exit $exit_status
+            if [ $exit_status -ne 0 ] ; then
+                failed_baseline_checks="$failed_baseline_checks $product-$variant"
+            fi
+            if [ "$CHECK_FOR_RULES" != "" ] ; then
+                # This is a little bit of sleight of hand for good output formatting at the
+                # expense of speed. We've already run the command once without
+                # ALLOW_RULES_IN_PRODUCT_CONFIG, so we know it passes there. We run it again
+                # with ALLOW_RULES_IN_PRODUCT_CONFIG=error to see if it fails, but that will
+                # cause it to only print the first error. But we want to see all of them,
+                # so if it fails we run it a third time with ALLOW_RULES_IN_PRODUCT_CONFIG=warning,
+                # so we can see all the warnings.
+                TARGET_PRODUCT=$product \
+                    TARGET_BUILD_VARIANT=$variant \
+                    ALLOW_RULES_IN_PRODUCT_CONFIG=error \
+                    build/soong/soong_ui.bash --dumpvar-mode TARGET_PRODUCT &> /dev/null
+                exit_status=$?
+                if_signal_exit $exit_status
+                if [ $exit_status -ne 0 ] ; then
+                    TARGET_PRODUCT=$product \
+                        TARGET_BUILD_VARIANT=$variant \
+                        ALLOW_RULES_IN_PRODUCT_CONFIG=warning \
+                        build/soong/soong_ui.bash --dumpvar-mode TARGET_PRODUCT > /dev/null
+                    failed_rule_checks="$failed_rule_checks $product-$variant"
+                fi
+            fi
+        fi
+    done
+done
+
+echo
+echo
+echo "------------------------------"
+echo SUMMARY
+echo "------------------------------"
+
+echo -n "Unit tests        "
+if [ $unit_tests -eq 0 ] ; then echo PASSED ; else echo FAILED ; fi
+
+echo -n "Baseline checks   "
+if [ "$failed_baseline_checks" = "" ] ; then echo PASSED ; else echo FAILED ; fi
+for combo in $failed_baseline_checks ; do
+    echo "                   ... $combo"
+done
+
+echo -n "Rules checks      "
+if [ "$failed_rule_checks" = "" ] ; then echo PASSED ; else echo FAILED ; fi
+for combo in $failed_rule_checks ; do
+    echo "                   ... $combo"
+done
+
diff --git a/tools/product_config/test/com/android/build/config/CsvParserTest.java b/tools/product_config/test/com/android/build/config/CsvParserTest.java
new file mode 100644
index 0000000..6f38d68
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/CsvParserTest.java
@@ -0,0 +1,148 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package com.android.build.config;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.io.StringReader;
+import java.util.Arrays;
+import java.util.List;
+
+/**
+ * Test for CSV parser class.
+ */
+public class CsvParserTest {
+    public String listsToStrings(String[] expected, List<String> actual) {
+        return "expected=" + Arrays.toString(expected)
+                + " actual=" + Arrays.toString(actual.toArray());
+    }
+
+    public void assertLineEquals(CsvParser.Line actual, int lineno, String... fields) {
+        if (actual.getLine() != lineno) {
+            throw new RuntimeException("lineno mismatch: expected=" + lineno
+                    + " actual=" + actual.getLine());
+        }
+        if (fields.length != actual.getFields().size()) {
+            throw new RuntimeException("getFields().size() mismatch: expected=" + fields.length
+                    + " actual=" + actual.getFields().size()
+                    + " values: " + listsToStrings(fields, actual.getFields()));
+        }
+        for (int i = 0; i < fields.length; i++) {
+            if (!fields[i].equals(actual.getFields().get(i))) {
+                throw new RuntimeException("getFields().get(" + i + ") mismatch: expected="
+                        + fields[i] + " actual=" + actual.getFields().get(i)
+                        + " values: " + listsToStrings(fields, actual.getFields()));
+
+            }
+        }
+    }
+
+    @Test
+    public void testEmptyString() throws Exception {
+        List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+                    ""));
+
+        Assert.assertEquals(0, lines.size());
+    }
+
+    @Test
+    public void testLexerOneCharacter() throws Exception {
+        List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+                    "a"));
+
+        Assert.assertEquals(1, lines.size());
+        assertLineEquals(lines.get(0), 1, "a");
+    }
+
+    @Test
+    public void testLexerTwoFieldsNoNewline() throws Exception {
+        List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+                    "a,b"));
+
+        Assert.assertEquals(1, lines.size());
+        assertLineEquals(lines.get(0), 1, "a", "b");
+    }
+
+    @Test
+    public void testLexerTwoFieldsNewline() throws Exception {
+        List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+                    "a,b\n"));
+
+        Assert.assertEquals(1, lines.size());
+        assertLineEquals(lines.get(0), 1, "a", "b");
+    }
+
+    @Test
+    public void testEndsWithTwoNewlines() throws Exception {
+        List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+                    "a,b\n\n"));
+
+        Assert.assertEquals(1, lines.size());
+        assertLineEquals(lines.get(0), 1, "a", "b");
+    }
+
+    @Test
+    public void testOnlyNewlines() throws Exception {
+        List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+                    "\n\n\n\n"));
+
+        Assert.assertEquals(0, lines.size());
+    }
+
+
+    @Test
+    public void testLexerComplex() throws Exception {
+        List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+                    ",\"ab\"\"\nc\",,de\n"
+                    + "fg,\n"
+                    + "\n"
+                    + ",\n"
+                    + "hijk"));
+
+        Assert.assertEquals(4, lines.size());
+        assertLineEquals(lines.get(0), 2, "", "ab\"\nc", "", "de");
+        assertLineEquals(lines.get(1), 3, "fg", "");
+        assertLineEquals(lines.get(2), 5, "", "");
+        assertLineEquals(lines.get(3), 6, "hijk");
+    }
+
+    @Test
+    public void testEndInsideQuoted() throws Exception {
+        try {
+            List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+                        "\"asd"));
+            throw new RuntimeException("Didn't throw ParseException");
+        } catch (CsvParser.ParseException ex) {
+            System.out.println("Caught: " + ex);
+        }
+    }
+
+    @Test
+    public void testCharacterAfterQuotedField() throws Exception {
+        try {
+            List<CsvParser.Line> lines = CsvParser.parse(new StringReader(
+                        "\"\"a"));
+            throw new RuntimeException("Didn't throw ParseException");
+        } catch (CsvParser.ParseException ex) {
+            System.out.println("Caught: " + ex);
+        }
+    }
+}
+
diff --git a/tools/product_config/test/com/android/build/config/ErrorReporterTest.java b/tools/product_config/test/com/android/build/config/ErrorReporterTest.java
new file mode 100644
index 0000000..b9b25b4
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/ErrorReporterTest.java
@@ -0,0 +1,121 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.HashSet;
+import java.util.List;
+
+public class ErrorReporterTest {
+    /**
+     * Test that errors can be recorded and retrieved.
+     */
+    @Test
+    public void testAdding() {
+        TestErrors errors = new TestErrors();
+
+        errors.ERROR.add(new Position("a", 12), "Errrororrrr");
+
+        Assert.assertTrue(errors.hadWarningOrError());
+        Assert.assertTrue(errors.hadError());
+
+        List<TestErrors.Entry> entries = errors.getEntries();
+        Assert.assertEquals(1, entries.size());
+
+        TestErrors.Entry entry = entries.get(0);
+        Assert.assertEquals(errors.ERROR, entry.getCategory());
+        Assert.assertEquals("a", entry.getPosition().getFile());
+        Assert.assertEquals(12, entry.getPosition().getLine());
+        Assert.assertEquals("Errrororrrr", entry.getMessage());
+
+        Assert.assertNotEquals("", errors.getErrorMessages());
+    }
+
+    /**
+     * Test that not adding an error doesn't record errors.
+     */
+    @Test
+    public void testNoError() {
+        TestErrors errors = new TestErrors();
+
+        Assert.assertFalse(errors.hadWarningOrError());
+        Assert.assertFalse(errors.hadError());
+        Assert.assertEquals("", errors.getErrorMessages());
+    }
+
+    /**
+     * Test that not adding a warning doesn't record errors.
+     */
+    @Test
+    public void testWarning() {
+        TestErrors errors = new TestErrors();
+
+        errors.WARNING.add("Waaaaarninggggg");
+
+        Assert.assertTrue(errors.hadWarningOrError());
+        Assert.assertFalse(errors.hadError());
+        Assert.assertNotEquals("", errors.getErrorMessages());
+    }
+
+    /**
+     * Test that hidden warnings don't report.
+     */
+    @Test
+    public void testHidden() {
+        TestErrors errors = new TestErrors();
+
+        errors.HIDDEN.add("Hidddeennn");
+
+        Assert.assertFalse(errors.hadWarningOrError());
+        Assert.assertFalse(errors.hadError());
+        Assert.assertEquals("", errors.getErrorMessages());
+    }
+
+    /**
+     * Test changing an error level.
+     */
+    @Test
+    public void testSetLevel() {
+        TestErrors errors = new TestErrors();
+        Assert.assertEquals(TestErrors.Level.ERROR, errors.ERROR.getLevel());
+
+        errors.ERROR.setLevel(TestErrors.Level.WARNING);
+
+        Assert.assertEquals(TestErrors.Level.WARNING, errors.ERROR.getLevel());
+    }
+
+    /**
+     * Test that changing a fixed error fails.
+     */
+    @Test
+    public void testSetLevelFails() {
+        TestErrors errors = new TestErrors();
+        Assert.assertEquals(TestErrors.Level.ERROR, errors.ERROR_FIXED.getLevel());
+
+        boolean exceptionThrown = false;
+        try {
+            errors.ERROR_FIXED.setLevel(TestErrors.Level.WARNING);
+        } catch (RuntimeException ex) {
+            exceptionThrown = true;
+        }
+
+        Assert.assertTrue(exceptionThrown);
+        Assert.assertEquals(TestErrors.Level.ERROR, errors.ERROR_FIXED.getLevel());
+    }
+}
diff --git a/tools/product_config/test/com/android/build/config/OptionsTest.java b/tools/product_config/test/com/android/build/config/OptionsTest.java
new file mode 100644
index 0000000..459efa5
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/OptionsTest.java
@@ -0,0 +1,137 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.HashMap;
+
+public class OptionsTest {
+
+    private Options parse(Errors errors, String[] args) {
+        final HashMap<String, String> env = new HashMap<>();
+        env.put("TARGET_PRODUCT", "test_product");
+        env.put("TARGET_BUILD_VARIANT", "user");
+        final Options.Parser parser = new Options.Parser(errors, args, env);
+        parser.setSkipRequiredArgValidation();
+        return parser.parse();
+    }
+
+    @Test
+    public void testErrorMissingLast() {
+        final Errors errors = new Errors();
+
+        final Options options = parse(errors, new String[] {
+                    "--error"
+                });
+
+        Assert.assertNotEquals("", TestErrors.getErrorMessages(errors));
+        Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+        TestErrors.assertHasEntry(errors.ERROR_COMMAND_LINE, errors);
+    }
+
+    @Test
+    public void testErrorMissingNotLast() {
+        final Errors errors = new Errors();
+
+        final Options options = parse(errors, new String[] {
+                    "--error", "--warning", "2"
+                });
+
+        Assert.assertNotEquals("", TestErrors.getErrorMessages(errors));
+        Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+        TestErrors.assertHasEntry(errors.ERROR_COMMAND_LINE, errors);
+    }
+
+    @Test
+    public void testErrorNotNumeric() {
+        final Errors errors = new Errors();
+
+        final Options options = parse(errors, new String[] {
+                    "--error", "notgood"
+                });
+
+        Assert.assertNotEquals("", TestErrors.getErrorMessages(errors));
+        Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+        TestErrors.assertHasEntry(errors.ERROR_COMMAND_LINE, errors);
+    }
+
+    @Test
+    public void testErrorInvalidError() {
+        final Errors errors = new Errors();
+
+        final Options options = parse(errors, new String[] {
+                    "--error", "50000"
+                });
+
+        Assert.assertEquals("", TestErrors.getErrorMessages(errors));
+        Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+        TestErrors.assertHasEntry(errors.WARNING_UNKNOWN_COMMAND_LINE_ERROR, errors);
+    }
+
+    @Test
+    public void testErrorOne() {
+        final Errors errors = new Errors();
+
+        final Options options = parse(errors, new String[] {
+                    "--error", "2"
+                });
+
+        Assert.assertEquals("", TestErrors.getErrorMessages(errors));
+        Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+        Assert.assertFalse(errors.hadWarningOrError());
+    }
+
+    @Test
+    public void testWarningOne() {
+        final Errors errors = new Errors();
+
+        final Options options = parse(errors, new String[] {
+                    "--warning", "2"
+                });
+
+        Assert.assertEquals("", TestErrors.getErrorMessages(errors));
+        Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+        Assert.assertFalse(errors.hadWarningOrError());
+    }
+
+    @Test
+    public void testHideOne() {
+        final Errors errors = new Errors();
+
+        final Options options = parse(errors, new String[] {
+                    "--hide", "2"
+                });
+
+        Assert.assertEquals("", TestErrors.getErrorMessages(errors));
+        Assert.assertEquals(Options.Action.DEFAULT, options.getAction());
+        Assert.assertFalse(errors.hadWarningOrError());
+    }
+
+    @Test
+    public void testEnv() {
+        final Errors errors = new Errors();
+
+        final Options options = parse(errors, new String[0]);
+
+        Assert.assertEquals("test_product", options.getProduct());
+        Assert.assertEquals("user", options.getVariant());
+        Assert.assertFalse(errors.hadWarningOrError());
+    }
+}
+
diff --git a/tools/product_config/test/com/android/build/config/PositionTest.java b/tools/product_config/test/com/android/build/config/PositionTest.java
new file mode 100644
index 0000000..82b5dd4
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/PositionTest.java
@@ -0,0 +1,68 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import org.junit.Assert;
+import org.junit.Test;
+
+import java.util.HashMap;
+
+public class PositionTest {
+
+    @Test
+    public void testParseEmpty() {
+        final Position pos = Position.parse("");
+
+        Assert.assertEquals(null, pos.getFile());
+        Assert.assertEquals(Position.NO_LINE, pos.getLine());
+    }
+
+    @Test
+    public void testParseOnlyFile() {
+        final Position pos = Position.parse("asdf");
+
+        Assert.assertEquals("asdf", pos.getFile());
+        Assert.assertEquals(Position.NO_LINE, pos.getLine());
+    }
+
+    @Test
+    public void testParseBoth() {
+        final Position pos = Position.parse("asdf:1");
+
+        Assert.assertEquals("asdf", pos.getFile());
+        Assert.assertEquals(1, pos.getLine());
+    }
+
+    @Test
+    public void testParseEndsWithColon() {
+        final Position pos = Position.parse("asdf:");
+
+        Assert.assertEquals("asdf", pos.getFile());
+        Assert.assertEquals(Position.NO_LINE, pos.getLine());
+    }
+
+    @Test
+    public void testParseEndsWithSpace() {
+        final Position pos = Position.parse("asdf: ");
+
+        Assert.assertEquals("asdf", pos.getFile());
+        Assert.assertEquals(Position.NO_LINE, pos.getLine());
+    }
+
+
+}
+
diff --git a/tools/product_config/test/com/android/build/config/TestErrors.java b/tools/product_config/test/com/android/build/config/TestErrors.java
new file mode 100644
index 0000000..dde88b0
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/TestErrors.java
@@ -0,0 +1,80 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import java.io.ByteArrayOutputStream;
+import java.io.PrintStream;
+import java.io.UnsupportedEncodingException;
+import java.nio.charset.StandardCharsets;
+
+/**
+ * Errors for testing.
+ */
+public class TestErrors extends ErrorReporter {
+
+    public static final int ERROR_CODE = 1;
+
+    public final Category ERROR = new Category(ERROR_CODE, true, Level.ERROR,
+            "An error.");
+
+    public static final int WARNING_CODE = 2;
+
+    public final Category WARNING = new Category(WARNING_CODE, true, Level.WARNING,
+            "A warning.");
+
+    public static final int HIDDEN_CODE = 3;
+
+    public final Category HIDDEN = new Category(HIDDEN_CODE, true, Level.HIDDEN,
+            "A hidden warning.");
+
+    public static final int ERROR_FIXED_CODE = 4;
+
+    public final Category ERROR_FIXED = new Category(ERROR_FIXED_CODE, false, Level.ERROR,
+            "An error that can't have its level changed.");
+
+    public void assertHasEntry(Errors.Category category) {
+        assertHasEntry(category, this);
+    }
+
+    public String getErrorMessages() {
+        return getErrorMessages(this);
+    }
+
+    public static void assertHasEntry(Errors.Category category, ErrorReporter errors) {
+        StringBuilder found = new StringBuilder();
+        for (Errors.Entry entry: errors.getEntries()) {
+            if (entry.getCategory() == category) {
+                return;
+            }
+            found.append(' ');
+            found.append(entry.getCategory().getCode());
+        }
+        throw new AssertionError("No error category " + category.getCode() + " found."
+                + " Found category codes were:" + found);
+    }
+
+    public static String getErrorMessages(ErrorReporter errors) {
+        final ByteArrayOutputStream stream = new ByteArrayOutputStream();
+        try {
+            errors.printErrors(new PrintStream(stream, true, StandardCharsets.UTF_8.name()));
+        } catch (UnsupportedEncodingException ex) {
+            // utf-8 is always supported
+        }
+        return new String(stream.toByteArray(), StandardCharsets.UTF_8);
+    }
+}
+
diff --git a/tools/product_config/test/com/android/build/config/TestRunner.java b/tools/product_config/test/com/android/build/config/TestRunner.java
new file mode 100644
index 0000000..546518f
--- /dev/null
+++ b/tools/product_config/test/com/android/build/config/TestRunner.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright (C) 2020 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.build.config;
+
+import org.junit.runner.Description;
+import org.junit.runner.JUnitCore;
+import org.junit.runner.Result;
+import org.junit.runner.notification.Failure;
+import org.junit.runner.notification.RunListener;
+
+public class TestRunner {
+    public static void main(String[] args) {
+        JUnitCore junit = new JUnitCore();
+
+        junit.addListener(new RunListener() {
+                    @Override
+                    public void testStarted(Description description) {
+                        System.out.println("\nSTARTING: " + description.getDisplayName());
+                    }
+
+                    @Override
+                    public void testFailure(Failure failure) {
+                        System.out.println("FAILED: "
+                                + failure.getDescription().getDisplayName());
+                        System.out.println(failure.getTrace());
+                    }
+                });
+        Result result = junit.run(CsvParserTest.class,
+                                  ErrorReporterTest.class,
+                                  OptionsTest.class,
+                                  PositionTest.class);
+        if (!result.wasSuccessful()) {
+            System.out.println("\n*** FAILED ***");
+        }
+    }
+}
+
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 5cb639a..6d88249 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -22,12 +22,18 @@
 //    `releasetools_X_defaults` in their defaults.
 //
 
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
 python_defaults {
     name: "releasetools_add_img_to_target_files_defaults",
     srcs: [
         "add_img_to_target_files.py",
     ],
     libs: [
+        "ota_metadata_proto",
+        "releasetools_apex_utils",
         "releasetools_build_image",
         "releasetools_build_super_image",
         "releasetools_common",
@@ -122,13 +128,15 @@
         "releasetools_check_target_files_vintf",
         "releasetools_common",
         "releasetools_verity_utils",
+        "apex_manifest",
     ],
     required: [
         "brillo_update_payload",
         "checkvintf",
         "lz4",
         "toybox",
-        "unpack_bootimg"
+        "unpack_bootimg",
+        "deapexer",
     ],
     target: {
         darwin: {
@@ -169,6 +177,8 @@
         "apex_utils.py",
     ],
     libs: [
+        "apex_manifest",
+        "ota_metadata_proto",
         "releasetools_common",
     ],
 }
@@ -219,6 +229,8 @@
         "lz4",
         "mkbootfs",
         "signapk",
+        "toybox",
+        "unpack_bootimg",
     ],
 }
 
@@ -274,11 +286,14 @@
         "boot_signer",
         "brotli",
         "bsdiff",
+        "deapexer",
         "imgdiff",
         "minigzip",
         "lz4",
         "mkbootfs",
         "signapk",
+        "toybox",
+        "unpack_bootimg",
     ],
 }
 
@@ -436,6 +451,7 @@
     required: [
         "checkvintf",
         "host_init_verifier",
+        "secilc",
     ],
     target: {
         darwin: {
@@ -540,6 +556,8 @@
     ],
     data: [
         "testdata/**/*",
+        ":com.android.apex.compressed.v1",
+        ":com.android.apex.compressed.v1_original",
     ],
     target: {
         darwin: {
@@ -547,6 +565,9 @@
             enabled: false,
         },
     },
+    required: [
+        "deapexer",
+    ],
 }
 
 python_test_host {
@@ -575,6 +596,7 @@
     name: "releasetools_py3_test",
     defaults: ["releasetools_test_defaults"],
     main: "test_utils.py",
+    test_suites: ["general-tests"],
     version: {
         py2: {
             enabled: false,
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 5f35d78..900c7b5 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -62,6 +62,9 @@
 import rangelib
 import sparse_img
 import verity_utils
+import ota_metadata_pb2
+
+from apex_utils import GetSystemApexInfoFromTargetFiles
 
 if sys.hexversion < 0x02070000:
   print("Python 2.7 or newer is required.", file=sys.stderr)
@@ -94,13 +97,13 @@
     name: The name of the output file, regardless of the final destination.
   """
 
-  def __init__(self, output_zip, input_dir, prefix, name):
+  def __init__(self, output_zip, input_dir, *args):
     # We write the intermediate output file under the given input_dir, even if
     # the final destination is a zip archive.
-    self.name = os.path.join(input_dir, prefix, name)
+    self.name = os.path.join(input_dir, *args)
     self._output_zip = output_zip
     if self._output_zip:
-      self._zip_name = os.path.join(prefix, name)
+      self._zip_name = os.path.join(*args)
 
   def Write(self):
     if self._output_zip:
@@ -179,7 +182,6 @@
   block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system.map")
   CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "system", img,
               block_list=block_list)
-
   return img.name
 
 
@@ -348,6 +350,41 @@
   img.Write()
   return img.name
 
+def AddPvmfw(output_zip):
+  """Adds the pvmfw image.
+
+  Uses the image under IMAGES/ if it already exists. Otherwise looks for the
+  image under PREBUILT_IMAGES/, signs it as needed, and returns the image name.
+  """
+  img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "pvmfw.img")
+  if os.path.exists(img.name):
+    logger.info("pvmfw.img already exists; no need to rebuild...")
+    return img.name
+
+  pvmfw_prebuilt_path = os.path.join(
+      OPTIONS.input_tmp, "PREBUILT_IMAGES", "pvmfw.img")
+  assert os.path.exists(pvmfw_prebuilt_path)
+  shutil.copy(pvmfw_prebuilt_path, img.name)
+
+  # AVB-sign the image as needed.
+  if OPTIONS.info_dict.get("avb_enable") == "true":
+    # Signing requires +w
+    os.chmod(img.name, os.stat(img.name).st_mode | stat.S_IWUSR)
+
+    avbtool = OPTIONS.info_dict["avb_avbtool"]
+    part_size = OPTIONS.info_dict["pvmfw_size"]
+    # The AVB hash footer will be replaced if already present.
+    cmd = [avbtool, "add_hash_footer", "--image", img.name,
+           "--partition_size", str(part_size), "--partition_name", "pvmfw"]
+    common.AppendAVBSigningArgs(cmd, "pvmfw")
+    args = OPTIONS.info_dict.get("avb_pvmfw_add_hash_footer_args")
+    if args and args.strip():
+      cmd.extend(shlex.split(args))
+    common.RunAndCheckOutput(cmd)
+
+  img.Write()
+  return img.name
+
 def AddCustomImages(output_zip, partition_name):
   """Adds and signs custom images in IMAGES/.
 
@@ -754,6 +791,22 @@
               os.path.join(OPTIONS.input_tmp, "IMAGES",
                            "{}.img".format(partition_name))))
 
+def AddApexInfo(output_zip):
+  apex_infos = GetSystemApexInfoFromTargetFiles(OPTIONS.input_tmp)
+  apex_metadata_proto = ota_metadata_pb2.ApexMetadata()
+  apex_metadata_proto.apex_info.extend(apex_infos)
+  apex_info_bytes = apex_metadata_proto.SerializeToString()
+
+  output_file = os.path.join(OPTIONS.input_tmp, "META", "apex_info.pb")
+  with open(output_file, "wb") as ofile:
+    ofile.write(apex_info_bytes)
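+  # When updating an existing zip, replace any previous META/apex_info.pb
+  # entry instead of adding a duplicate.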
+  if output_zip:
+    arc_name = "META/apex_info.pb"
+    if arc_name in output_zip.namelist():
+      OPTIONS.replace_updated_files_list.append(arc_name)
+    else:
+      common.ZipWrite(output_zip, output_file, arc_name)
+
 
 def AddImagesToTargetFiles(filename):
   """Creates and adds images (boot/recovery/system/...) to a target_files.zip.
@@ -914,6 +967,8 @@
     banner("system_other")
     AddSystemOther(output_zip)
 
+  AddApexInfo(output_zip)
+
   if not OPTIONS.is_signing:
     banner("userdata")
     AddUserdata(output_zip)
@@ -928,6 +983,10 @@
     banner("dtbo")
     partitions['dtbo'] = AddDtbo(output_zip)
 
+  if OPTIONS.info_dict.get("has_pvmfw") == "true":
+    banner("pvmfw")
+    partitions['pvmfw'] = AddPvmfw(output_zip)
+
   # Custom images.
   custom_partitions = OPTIONS.info_dict.get(
       "avb_custom_images_partition_list", "").strip().split()
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 6808f15..1c88053 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -21,7 +21,12 @@
 import shutil
 import zipfile
 
+import apex_manifest
 import common
+from common import UnzipTemp, RunAndCheckOutput, MakeTempFile, OPTIONS
+
+import ota_metadata_pb2
+
 
 logger = logging.getLogger(__name__)
 
@@ -29,6 +34,8 @@
 
 APEX_PAYLOAD_IMAGE = 'apex_payload.img'
 
+APEX_PUBKEY = 'apex_pubkey'
+
 
 class ApexInfoError(Exception):
   """An Exception raised during Apex Information command."""
@@ -69,7 +76,7 @@
     if not os.path.exists(self.debugfs_path):
       raise ApexSigningError(
           "Couldn't find location of debugfs_static: " +
-          "Path {} does not exist. ".format(debugfs_path) +
+          "Path {} does not exist. ".format(self.debugfs_path) +
           "Make sure bin/debugfs_static can be found in -p <path>")
     list_cmd = ['deapexer', '--debugfs_path',
                 self.debugfs_path, 'list', self.apex_path]
@@ -105,7 +112,7 @@
     if not os.path.exists(self.debugfs_path):
       raise ApexSigningError(
           "Couldn't find location of debugfs_static: " +
-          "Path {} does not exist. ".format(debugfs_path) +
+          "Path {} does not exist. ".format(self.debugfs_path) +
           "Make sure bin/debugfs_static can be found in -p <path>")
     payload_dir = common.MakeTempDir()
     extract_cmd = ['deapexer', '--debugfs_path',
@@ -127,8 +134,9 @@
       # signed apk file.
       unsigned_apk = common.MakeTempFile()
       os.rename(apk_path, unsigned_apk)
-      common.SignFile(unsigned_apk, apk_path, key_name, self.key_passwords.get(key_name),
-                      codename_to_api_level_map=self.codename_to_api_level_map)
+      common.SignFile(
+          unsigned_apk, apk_path, key_name, self.key_passwords.get(key_name),
+          codename_to_api_level_map=self.codename_to_api_level_map)
       has_signed_apk = True
     return payload_dir, has_signed_apk
 
@@ -300,13 +308,13 @@
   return payload_info
 
 
-def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
-             apk_keys, codename_to_api_level_map,
-             no_hashtree, signing_args=None):
-  """Signs the current APEX with the given payload/container keys.
+def SignUncompressedApex(avbtool, apex_file, payload_key, container_key,
+                         container_pw, apk_keys, codename_to_api_level_map,
+                         no_hashtree, signing_args=None):
+  """Signs the current uncompressed APEX with the given payload/container keys.
 
   Args:
-    apex_data: Raw APEX data.
+    apex_file: Uncompressed APEX file.
     payload_key: The path to payload signing key (w/ extension).
     container_key: The path to container signing key (w/o extension).
     container_pw: The matching password of the container_key, or None.
@@ -318,12 +326,6 @@
   Returns:
     The path to the signed APEX file.
   """
-  apex_file = common.MakeTempFile(prefix='apex-', suffix='.apex')
-  with open(apex_file, 'wb') as apex_fp:
-    apex_fp.write(apex_data)
-
-  APEX_PUBKEY = 'apex_pubkey'
-
   # 1. Extract the apex payload image and sign the containing apk files. Repack
   # the apex file after signing.
   apk_signer = ApexApkSigner(apex_file, container_pw,
@@ -380,3 +382,209 @@
       extra_signapk_args=extra_signapk_args)
 
   return signed_apex
+
+
+def SignCompressedApex(avbtool, apex_file, payload_key, container_key,
+                         container_pw, apk_keys, codename_to_api_level_map,
+                         no_hashtree, signing_args=None):
+  """Signs the current compressed APEX with the given payload/container keys.
+
+  Args:
+    apex_file: Path to the compressed APEX file.
+    payload_key: The path to payload signing key (w/ extension).
+    container_key: The path to container signing key (w/o extension).
+    container_pw: The matching password of the container_key, or None.
+    apk_keys: A dict that holds the signing keys for apk files.
+    codename_to_api_level_map: A dict that maps from codename to API level.
+    no_hashtree: Don't include hashtree in the signed APEX.
+    signing_args: Additional args to be passed to the payload signer.
+
+  Returns:
+    The path to the signed APEX file.
+  """
+  debugfs_path = os.path.join(OPTIONS.search_path, 'bin', 'debugfs_static')
+
+  # 1. Decompress original_apex inside compressed apex.
+  original_apex_file = common.MakeTempFile(prefix='original-apex-',
+                                           suffix='.apex')
+  # Decompression target path should not exist
+  os.remove(original_apex_file)
+  common.RunAndCheckOutput(['deapexer', '--debugfs_path', debugfs_path,
+                            'decompress', '--input', apex_file,
+                            '--output', original_apex_file])
+
+  # 2. Sign original_apex
+  signed_original_apex_file = SignUncompressedApex(
+      avbtool,
+      original_apex_file,
+      payload_key,
+      container_key,
+      container_pw,
+      apk_keys,
+      codename_to_api_level_map,
+      no_hashtree,
+      signing_args)
+
+  # 3. Compress signed original apex.
+  compressed_apex_file = common.MakeTempFile(prefix='apex-container-',
+                                             suffix='.capex')
+  common.RunAndCheckOutput(['apex_compression_tool',
+                            'compress',
+                            '--apex_compression_tool_path', os.getenv('PATH'),
+                            '--input', signed_original_apex_file,
+                            '--output', compressed_apex_file])
+
+  # 4. Align apex
+  aligned_apex = common.MakeTempFile(prefix='apex-container-', suffix='.capex')
+  common.RunAndCheckOutput(['zipalign', '-f', '4096', compressed_apex_file,
+                            aligned_apex])
+
+  # 5. Sign the APEX container with container_key.
+  signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.capex')
+
+  # Specify the 4K alignment when calling SignApk.
+  extra_signapk_args = OPTIONS.extra_signapk_args[:]
+  extra_signapk_args.extend(['-a', '4096'])
+
+  password = container_pw.get(container_key) if container_pw else None
+  common.SignFile(
+      aligned_apex,
+      signed_apex,
+      container_key,
+      password,
+      codename_to_api_level_map=codename_to_api_level_map,
+      extra_signapk_args=extra_signapk_args)
+
+  return signed_apex
+
+
+def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
+             apk_keys, codename_to_api_level_map,
+             no_hashtree, signing_args=None):
+  """Signs the current APEX with the given payload/container keys.
+
+  Args:
+    apex_data: Raw APEX data.
+    payload_key: The path to payload signing key (w/ extension).
+    container_key: The path to container signing key (w/o extension).
+    container_pw: The matching password of the container_key, or None.
+    apk_keys: A dict that holds the signing keys for apk files.
+    codename_to_api_level_map: A dict that maps from codename to API level.
+    no_hashtree: Don't include hashtree in the signed APEX.
+    signing_args: Additional args to be passed to the payload signer.
+
+  Returns:
+    The path to the signed APEX file.
+  """
+  apex_file = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
+  with open(apex_file, 'wb') as output_fp:
+    output_fp.write(apex_data)
+
+  debugfs_path = os.path.join(OPTIONS.search_path, 'bin', 'debugfs_static')
+  cmd = ['deapexer', '--debugfs_path', debugfs_path,
+         'info', '--print-type', apex_file]
+
+  try:
+    apex_type = common.RunAndCheckOutput(cmd).strip()
+    if apex_type == 'UNCOMPRESSED':
+      return SignUncompressedApex(
+          avbtool,
+          apex_file,
+          payload_key=payload_key,
+          container_key=container_key,
+          container_pw=None,
+          codename_to_api_level_map=codename_to_api_level_map,
+          no_hashtree=no_hashtree,
+          apk_keys=apk_keys,
+          signing_args=signing_args)
+    elif apex_type == 'COMPRESSED':
+      return SignCompressedApex(
+          avbtool,
+          apex_file,
+          payload_key=payload_key,
+          container_key=container_key,
+          container_pw=None,
+          codename_to_api_level_map=codename_to_api_level_map,
+          no_hashtree=no_hashtree,
+          apk_keys=apk_keys,
+          signing_args=signing_args)
+    else:
+      # TODO(b/172912232): support signing compressed apex
+      raise ApexInfoError('Unsupported apex type {}'.format(apex_type))
+
+  except common.ExternalError as e:
+    raise ApexInfoError(
+        'Failed to get type for {}:\n{}'.format(apex_file, e))
+
+def GetSystemApexInfoFromTargetFiles(input_file):
+  """
+  Get information about system APEX stored in the input_file zip
+
+  Args:
+    input_file: The filename of the target build target-files zip or directory.
+
+  Return:
+    A list of ota_metadata_pb2.ApexInfo() populated using the APEX stored in
+    /system partition of the input_file
+  """
+
+  # Extract the apex files so that we can run checks on them
+  if not isinstance(input_file, str):
+    raise RuntimeError("must pass filepath to target-files zip or directory")
+
+  if os.path.isdir(input_file):
+    tmp_dir = input_file
+  else:
+    tmp_dir = UnzipTemp(input_file, ["SYSTEM/apex/*"])
+  target_dir = os.path.join(tmp_dir, "SYSTEM/apex/")
+
+  # Partial target-files packages for vendor-only builds may not contain
+  # a system apex directory.
+  if not os.path.exists(target_dir):
+    return []
+
+  apex_infos = []
+
+  debugfs_path = "debugfs"
+  if OPTIONS.search_path:
+    debugfs_path = os.path.join(OPTIONS.search_path, "bin", "debugfs_static")
+  deapexer = 'deapexer'
+  if OPTIONS.search_path:
+    deapexer_path = os.path.join(OPTIONS.search_path, "bin", "deapexer")
+    if os.path.isfile(deapexer_path):
+      deapexer = deapexer_path
+  for apex_filename in os.listdir(target_dir):
+    apex_filepath = os.path.join(target_dir, apex_filename)
+    if not os.path.isfile(apex_filepath) or \
+        not zipfile.is_zipfile(apex_filepath):
+      logger.info("Skipping %s because it's not a zipfile", apex_filepath)
+      continue
+    apex_info = ota_metadata_pb2.ApexInfo()
+    # Open the apex file to retrieve information
+    manifest = apex_manifest.fromApex(apex_filepath)
+    apex_info.package_name = manifest.name
+    apex_info.version = manifest.version
+    # Check if the file is compressed or not
+    apex_type = RunAndCheckOutput([
+        deapexer, "--debugfs_path", debugfs_path,
+        'info', '--print-type', apex_filepath]).rstrip()
+    if apex_type == 'COMPRESSED':
+      apex_info.is_compressed = True
+    elif apex_type == 'UNCOMPRESSED':
+      apex_info.is_compressed = False
+    else:
+      raise RuntimeError('Not an APEX file: ' + apex_type)
+
+    # Decompress compressed APEX to determine its size
+    if apex_info.is_compressed:
+      decompressed_file_path = MakeTempFile(prefix="decompressed-",
+                                            suffix=".apex")
+      # Decompression target path should not exist
+      os.remove(decompressed_file_path)
+      RunAndCheckOutput([deapexer, 'decompress', '--input', apex_filepath,
+                         '--output', decompressed_file_path])
+      apex_info.decompressed_size = os.path.getsize(decompressed_file_path)
+
+    apex_infos.append(apex_info)
+
+  return apex_infos
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 820c128..3726df6 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -308,6 +308,10 @@
       build_command.extend(["-C", fs_config])
     if "selinux_fc" in prop_dict:
       build_command.extend(["-c", prop_dict["selinux_fc"]])
+    if "timestamp" in prop_dict:
+      build_command.extend(["-T", str(prop_dict["timestamp"])])
+    if "uuid" in prop_dict:
+      build_command.extend(["-U", prop_dict["uuid"]])
   elif fs_type.startswith("squash"):
     build_command = ["mksquashfsimage.sh"]
     build_command.extend([in_dir, out_file])
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 19b132b..0061819 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -109,10 +109,12 @@
 
 # The partitions allowed to be signed by AVB (Android Verified Boot 2.0). Note
 # that system_other is not in the list because we don't want to include its
-# descriptor into vbmeta.img.
-AVB_PARTITIONS = ('boot', 'dtbo', 'odm', 'product', 'recovery', 'system',
-                  'system_ext', 'vendor', 'vendor_boot', 'vendor_dlkm',
-                  'odm_dlkm')
+# descriptor into vbmeta.img. When adding a new entry here, the
+# AVB_FOOTER_ARGS_BY_PARTITION in sign_target_files_apks need to be updated
+# accordingly.
+AVB_PARTITIONS = ('boot', 'dtbo', 'odm', 'product', 'pvmfw', 'recovery',
+                  'system', 'system_ext', 'vendor', 'vendor_boot',
+                  'vendor_dlkm', 'odm_dlkm')
 
 # Chained VBMeta partitions.
 AVB_VBMETA_PARTITIONS = ('vbmeta_system', 'vbmeta_vendor')
@@ -276,29 +278,6 @@
   return subprocess.Popen(args, **kwargs)
 
 
-def RunAndWait(args, verbose=None, **kwargs):
-  """Runs the given command waiting for it to complete.
-
-  Args:
-    args: The command represented as a list of strings.
-    verbose: Whether the commands should be shown. Default to the global
-        verbosity if unspecified.
-    kwargs: Any additional args to be passed to subprocess.Popen(), such as env,
-        stdin, etc. stdout and stderr will default to subprocess.PIPE and
-        subprocess.STDOUT respectively unless caller specifies any of them.
-
-  Raises:
-    ExternalError: On non-zero exit from the command.
-  """
-  proc = Run(args, verbose=verbose, **kwargs)
-  proc.wait()
-
-  if proc.returncode != 0:
-    raise ExternalError(
-        "Failed to run command '{}' (exit code {})".format(
-            args, proc.returncode))
-
-
 def RunAndCheckOutput(args, verbose=None, **kwargs):
   """Runs the given command and returns the output.
 
@@ -663,7 +642,7 @@
   """Extracts the contents of fn from input zipfile or directory into a file."""
   if isinstance(input_file, zipfile.ZipFile):
     tmp_file = MakeTempFile(os.path.basename(fn))
-    with open(tmp_file, 'w') as f:
+    with open(tmp_file, 'wb') as f:
       f.write(input_file.read(fn))
     return tmp_file
   else:
@@ -887,8 +866,8 @@
     prop_file = GetBootImageBuildProp(boot_img)
     if prop_file is None:
       return ''
-    with open(prop_file) as f:
-      return f.read().decode()
+    with open(prop_file, "r") as f:
+      return f.read()
 
   @staticmethod
   def _ReadPartitionPropFile(input_file, name):
@@ -1714,6 +1693,11 @@
   cmd.extend(["--vendor_ramdisk", ramdisk_img.name])
   cmd.extend(["--vendor_boot", img.name])
 
+  fn = os.path.join(sourcedir, "vendor_bootconfig")
+  if os.access(fn, os.F_OK):
+    cmd.append("--vendor_bootconfig")
+    cmd.append(fn)
+
   ramdisk_fragment_imgs = []
   fn = os.path.join(sourcedir, "vendor_ramdisk_fragments")
   if os.access(fn, os.F_OK):
@@ -1957,12 +1941,13 @@
     # filename listed in system.map may contain an additional leading slash
     # (i.e. "//system/framework/am.jar"). Using lstrip to get consistent
     # results.
-    arcname = entry.replace(which, which.upper(), 1).lstrip('/')
-
-    # Special handling another case, where files not under /system
+    # And handle another special case, where files not under /system
     # (e.g. "/sbin/charger") are packed under ROOT/ in a target_files.zip.
-    if which == 'system' and not arcname.startswith('SYSTEM'):
+    arcname = entry.lstrip('/')
+    if which == 'system' and not arcname.startswith('system'):
       arcname = 'ROOT/' + arcname
+    else:
+      arcname = arcname.replace(which, which.upper(), 1)
 
     assert arcname in input_zip.namelist(), \
         "Failed to find the ZIP entry for {}".format(entry)
@@ -3672,14 +3657,13 @@
     RunAndCheckOutput(['toybox', 'cpio', '-F', abs_uncompressed_ramdisk, '-i'],
                cwd=extracted_ramdisk)
 
-    prop_file = None
     for search_path in RAMDISK_BUILD_PROP_REL_PATHS:
       prop_file = os.path.join(extracted_ramdisk, search_path)
       if os.path.isfile(prop_file):
-        break
+        return prop_file
       logger.warning('Unable to get boot image timestamp: no %s in ramdisk', search_path)
 
-    return prop_file
+    return None
 
   except ExternalError as e:
     logger.warning('Unable to get boot image build props: %s', e)
diff --git a/tools/releasetools/merge_target_files.py b/tools/releasetools/merge_target_files.py
index 9360d7b..16cab4f 100755
--- a/tools/releasetools/merge_target_files.py
+++ b/tools/releasetools/merge_target_files.py
@@ -93,6 +93,7 @@
 import subprocess
 import sys
 import zipfile
+from xml.etree import ElementTree
 
 import add_img_to_target_files
 import build_super_image
@@ -658,6 +659,80 @@
       os.path.join(output_target_files_dir, 'META', 'vendor_file_contexts.bin'))
 
 
+def compile_split_sepolicy(product_out, partition_map, output_policy):
+  """Uses secilc to compile a split sepolicy file.
+
+  Depends on various */etc/selinux/* and */etc/vintf/* files within partitions.
+
+  Args:
+    product_out: PRODUCT_OUT directory, containing partition directories.
+    partition_map: A map of partition name -> relative path within product_out.
+    output_policy: The name of the output policy created by secilc.
+
+  Returns:
+    A command list that can be executed to create the compiled sepolicy.
+  """
+
+  def get_file(partition, path):
+    if partition not in partition_map:
+      logger.warning('Cannot load SEPolicy files for missing partition %s',
+                     partition)
+      return None
+    return os.path.join(product_out, partition_map[partition], path)
+
+  # Load the kernel sepolicy version from the FCM. This is normally provided
+  # directly to selinux.cpp as a build flag, but is also available in this file.
+  fcm_file = get_file('system', 'etc/vintf/compatibility_matrix.device.xml')
+  if not fcm_file or not os.path.exists(fcm_file):
+    raise ExternalError('Missing required file for loading sepolicy: %s', fcm_file)
+  kernel_sepolicy_version = ElementTree.parse(fcm_file).getroot().find(
+      'sepolicy/kernel-sepolicy-version').text
+
+  # Load the vendor's plat sepolicy version. This is the version used for
+  # locating sepolicy mapping files.
+  vendor_plat_version_file = get_file('vendor',
+                                      'etc/selinux/plat_sepolicy_vers.txt')
+  if not vendor_plat_version_file or not os.path.exists(
+      vendor_plat_version_file):
+    raise ExternalError('Missing required sepolicy file %s',
+                        vendor_plat_version_file)
+  with open(vendor_plat_version_file) as f:
+    vendor_plat_version = f.read().strip()
+
+  # Use the same flags and arguments as selinux.cpp OpenSplitPolicy().
+  cmd = ['secilc', '-m', '-M', 'true', '-G', '-N']
+  cmd.extend(['-c', kernel_sepolicy_version])
+  cmd.extend(['-o', output_policy])
+  cmd.extend(['-f', '/dev/null'])
+
+  required_policy_files = (
+      ('system', 'etc/selinux/plat_sepolicy.cil'),
+      ('system', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+      ('vendor', 'etc/selinux/vendor_sepolicy.cil'),
+      ('vendor', 'etc/selinux/plat_pub_versioned.cil'),
+  )
+  for policy in (map(lambda partition_and_path: get_file(*partition_and_path),
+                     required_policy_files)):
+    if not policy or not os.path.exists(policy):
+      raise ExternalError('Missing required sepolicy file %s', policy)
+    cmd.append(policy)
+
+  optional_policy_files = (
+      ('system', 'etc/selinux/mapping/%s.compat.cil' % vendor_plat_version),
+      ('system_ext', 'etc/selinux/system_ext_sepolicy.cil'),
+      ('system_ext', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+      ('product', 'etc/selinux/product_sepolicy.cil'),
+      ('product', 'etc/selinux/mapping/%s.cil' % vendor_plat_version),
+      ('odm', 'etc/selinux/odm_sepolicy.cil'),
+  )
+  for policy in (map(lambda partition_and_path: get_file(*partition_and_path),
+                     optional_policy_files)):
+    if policy and os.path.exists(policy):
+      cmd.append(policy)
+
+  return cmd
+
+
 def process_special_cases(framework_target_files_temp_dir,
                           vendor_target_files_temp_dir,
                           output_target_files_temp_dir,
@@ -887,12 +962,12 @@
       output_zip,
       '-C',
       source_dir,
-      '-l',
+      '-r',
       output_target_files_list,
   ]
 
   logger.info('creating %s', output_file)
-  common.RunAndWait(command, verbose=True)
+  common.RunAndCheckOutput(command, verbose=True)
   logger.info('finished creating %s', output_file)
 
   return output_zip
@@ -977,17 +1052,28 @@
       raise ValueError('sharedUserId APK error. See %s' %
                        shareduid_violation_modules)
 
-  # Run host_init_verifier on the combined init rc files.
+  # host_init_verifier and secilc check only the following partitions:
   filtered_partitions = {
       partition: path
       for partition, path in partition_map.items()
-      # host_init_verifier checks only the following partitions:
       if partition in ['system', 'system_ext', 'product', 'vendor', 'odm']
   }
+
+  # Run host_init_verifier on the combined init rc files.
   common.RunHostInitVerifier(
       product_out=output_target_files_temp_dir,
       partition_map=filtered_partitions)
 
+  # Check that the split sepolicy from the multiple builds can compile.
+  split_sepolicy_cmd = compile_split_sepolicy(
+      product_out=output_target_files_temp_dir,
+      partition_map=filtered_partitions,
+      output_policy=os.path.join(output_target_files_temp_dir,
+                                 'META/combined.policy'))
+  logger.info('Compiling split sepolicy: %s', ' '.join(split_sepolicy_cmd))
+  common.RunAndCheckOutput(split_sepolicy_cmd)
+  # TODO(b/178864050): Run tests on the combined.policy file.
+
   generate_images(output_target_files_temp_dir, rebuild_recovery)
 
   generate_super_empty_image(output_target_files_temp_dir, output_super_empty)
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 6b82d32..0acd48a 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -211,6 +211,10 @@
       Use the specified custom_image to update custom_partition when generating
       an A/B OTA package. e.g. "--custom_image oem=oem.img --custom_image
       cus=cus_test.img"
+
+  --disable_vabc
+      Disable Virtual A/B Compression, for builds that have compression enabled
+      by default.
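+
+      For example (illustrative; other flags such as signing keys omitted):
+        ota_from_target_files --disable_vabc target_files.zip ota_update.zip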
 """
 
 from __future__ import print_function
@@ -229,11 +233,11 @@
 
 import common
 import ota_utils
+from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
+                       PropertyFiles, SECURITY_PATCH_LEVEL_PROP_NAME)
 import target_files_diff
 from check_target_files_vintf import CheckVintfIfTrebleEnabled
 from non_ab_ota import GenerateNonAbOtaPackage
-from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
-                       PropertyFiles)
 
 if sys.hexversion < 0x02070000:
   print("Python 2.7 or newer is required.", file=sys.stderr)
@@ -272,6 +276,8 @@
 OPTIONS.disable_verity_computation = False
 OPTIONS.partial = None
 OPTIONS.custom_images = {}
+OPTIONS.disable_vabc = False
+OPTIONS.spl_downgrade = False
 
 POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
 DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -291,6 +297,8 @@
     'vendor_boot']
 
 
+
+
 class PayloadSigner(object):
   """A class that wraps the payload signing works.
 
@@ -961,7 +969,8 @@
       for part in partition_state]
   return ["--partition_timestamps", ",".join(partition_timestamps)]
 
-def GeneratePartitionTimestampFlagsDowngrade(pre_partition_state, post_partition_state):
+def GeneratePartitionTimestampFlagsDowngrade(
+    pre_partition_state, post_partition_state):
   assert pre_partition_state is not None
   partition_timestamps = {}
   for part in pre_partition_state:
@@ -970,9 +979,9 @@
     partition_timestamps[part.partition_name] = \
       max(part.version, partition_timestamps[part.partition_name])
   return [
-    "--partition_timestamps",
-    ",".join([key + ":" + val for (key, val) in partition_timestamps.items()])
-    ]
+      "--partition_timestamps",
+      ",".join([key + ":" + val for (key, val) in partition_timestamps.items()])
+  ]
 
 def IsSparseImage(filepath):
   with open(filepath, 'rb') as fp:
@@ -1026,7 +1035,8 @@
   else:
     staging_file = output_file
   output_zip = zipfile.ZipFile(staging_file, "w",
-                               compression=zipfile.ZIP_DEFLATED, allowZip64=True)
+                               compression=zipfile.ZIP_DEFLATED,
+                               allowZip64=True)
 
   if source_file is not None:
     assert "ab_partitions" in OPTIONS.source_info_dict, \
@@ -1080,25 +1090,27 @@
   if OPTIONS.downgrade:
     max_timestamp = source_info.GetBuildProp("ro.build.date.utc")
     partition_timestamps_flags = GeneratePartitionTimestampFlagsDowngrade(
-      metadata.precondition.partition_state,
-      metadata.postcondition.partition_state
-      )
+        metadata.precondition.partition_state,
+        metadata.postcondition.partition_state
+    )
   else:
     max_timestamp = str(metadata.postcondition.timestamp)
     partition_timestamps_flags = GeneratePartitionTimestampFlags(
         metadata.postcondition.partition_state)
 
+  if OPTIONS.disable_vabc:
+    additional_args += ["--disable_vabc", "true"]
   additional_args += ["--max_timestamp", max_timestamp]
 
   if SupportsMainlineGkiUpdates(source_file):
-    logger.warn("Detected build with mainline GKI, include full boot image.")
+    logger.warning("Detected build with mainline GKI, include full boot image.")
     additional_args.extend(["--full_boot", "true"])
 
   payload.Generate(
       target_file,
       source_file,
       additional_args + partition_timestamps_flags
-   )
+  )
 
   # Sign the payload.
   payload_signer = PayloadSigner()
@@ -1117,7 +1129,7 @@
     secondary_payload = Payload(secondary=True)
     secondary_payload.Generate(secondary_target_file,
                                additional_args=["--max_timestamp",
-                               max_timestamp])
+                                                max_timestamp])
     secondary_payload.Sign(payload_signer)
     secondary_payload.WriteToZip(output_zip)
 
@@ -1125,7 +1137,7 @@
   # into A/B OTA package.
   target_zip = zipfile.ZipFile(target_file, "r", allowZip64=True)
   if (target_info.get("verity") == "true" or
-          target_info.get("avb_enable") == "true"):
+      target_info.get("avb_enable") == "true"):
     care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
                      "META/" + x in target_zip.namelist()]
 
@@ -1140,6 +1152,15 @@
     else:
       logger.warning("Cannot find care map file in target_file package")
 
+  # Copy apex_info.pb over to generated OTA package.
+  try:
+    apex_info_entry = target_zip.getinfo("META/apex_info.pb")
+    with target_zip.open(apex_info_entry, "r") as zfp:
+      common.ZipWriteStr(output_zip, "apex_info.pb", zfp.read(),
+                        compress_type=zipfile.ZIP_STORED)
+  except KeyError:
+    logger.warning("target_file doesn't contain apex_info.pb %s", target_file)
+
   common.ZipClose(target_zip)
 
   CheckVintfIfTrebleEnabled(target_file, target_info)
@@ -1246,6 +1267,10 @@
     elif o == "--custom_image":
       custom_partition, custom_image = a.split("=")
       OPTIONS.custom_images[custom_partition] = custom_image
+    elif o == "--disable_vabc":
+      OPTIONS.disable_vabc = True
+    elif o == "--spl_downgrade":
+      OPTIONS.spl_downgrade = True
     else:
       return False
     return True
@@ -1287,6 +1312,8 @@
                                  "boot_variable_file=",
                                  "partial=",
                                  "custom_image=",
+                                 "disable_vabc",
+                                 "spl_downgrade"
                              ], extra_option_handler=option_handler)
 
   if len(args) != 2:
@@ -1334,13 +1361,14 @@
   if OPTIONS.partial:
     OPTIONS.info_dict['ab_partitions'] = \
       list(
-        set(OPTIONS.info_dict['ab_partitions']) & set(OPTIONS.partial)
-        )
+          set(OPTIONS.info_dict['ab_partitions']) & set(OPTIONS.partial)
+      )
     if OPTIONS.source_info_dict:
       OPTIONS.source_info_dict['ab_partitions'] = \
         list(
-          set(OPTIONS.source_info_dict['ab_partitions']) & set(OPTIONS.partial)
-          )
+            set(OPTIONS.source_info_dict['ab_partitions']) &
+            set(OPTIONS.partial)
+        )
 
   # Load OEM dicts if provided.
   OPTIONS.oem_dicts = _LoadOemDicts(OPTIONS.oem_source)
@@ -1349,7 +1377,7 @@
   # use_dynamic_partitions but target build does.
   if (OPTIONS.source_info_dict and
       OPTIONS.source_info_dict.get("use_dynamic_partitions") != "true" and
-          OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
+      OPTIONS.target_info_dict.get("use_dynamic_partitions") == "true"):
     if OPTIONS.target_info_dict.get("dynamic_partition_retrofit") != "true":
       raise common.ExternalError(
           "Expect to generate incremental OTA for retrofitting dynamic "
@@ -1365,7 +1393,8 @@
   ab_update = OPTIONS.info_dict.get("ab_update") == "true"
   allow_non_ab = OPTIONS.info_dict.get("allow_non_ab") == "true"
   if OPTIONS.force_non_ab:
-    assert allow_non_ab, "--force_non_ab only allowed on devices that supports non-A/B"
+    assert allow_non_ab,\
+      "--force_non_ab only allowed on devices that supports non-A/B"
     assert ab_update, "--force_non_ab only allowed on A/B devices"
 
   generate_ab = not OPTIONS.force_non_ab and ab_update
@@ -1380,7 +1409,33 @@
           "build/make/target/product/security/testkey")
     # Get signing keys
     OPTIONS.key_passwords = common.GetKeyPasswords([OPTIONS.package_key])
+    private_key_path = OPTIONS.package_key + OPTIONS.private_key_suffix
+    if not os.path.exists(private_key_path):
+      raise common.ExternalError(
+          "Private key {} doesn't exist. Make sure you passed the correct "
+          "key path through the -k option".format(private_key_path))
 
+  if OPTIONS.source_info_dict:
+    source_build_prop = OPTIONS.source_info_dict["build.prop"]
+    target_build_prop = OPTIONS.target_info_dict["build.prop"]
+    source_spl = source_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
+    target_spl = target_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
+    is_spl_downgrade = target_spl < source_spl
+    if is_spl_downgrade and not OPTIONS.spl_downgrade:
+      raise common.ExternalError(
+        "Target security patch level {} is older than source SPL {} applying "
+        "such OTA will likely cause device fail to boot. Pass --spl_downgrade "
+        "to override this check. This script expects security patch level to "
+        "be in format yyyy-mm-dd (e.x. 2021-02-05). It's possible to use "
+        "separators other than -, so as long as it's used consistenly across "
+        "all SPL dates".format(target_spl, source_spl))
+    elif not is_spl_downgrade and OPTIONS.spl_downgrade:
+      raise ValueError("--spl_downgrade specified but no actual SPL downgrade"
+                       " detected. Please only pass in this flag if you want a"
+                       " SPL downgrade. Target SPL: {} Source SPL: {}"
+                       .format(target_spl, source_spl))
   if generate_ab:
     GenerateAbOtaPackage(
         target_file=args[0],
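
The SPL guard above compares security patch levels as plain strings. A small
illustration (not part of the change) of why that is sufficient: zero-padded
yyyy-mm-dd values sort lexicographically in the same order as chronologically,
as long as the separator is used consistently.

def is_spl_downgrade(source_spl, target_spl):
  # '2021-02-05' < '2021-03-01' holds both as strings and as dates.
  return target_spl < source_spl

assert is_spl_downgrade(source_spl='2021-03-01', target_spl='2021-02-05')
assert not is_spl_downgrade(source_spl='2021-02-05', target_spl='2021-03-01')
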
diff --git a/tools/releasetools/ota_metadata.proto b/tools/releasetools/ota_metadata.proto
index 20d3091..7aaca6f 100644
--- a/tools/releasetools/ota_metadata.proto
+++ b/tools/releasetools/ota_metadata.proto
@@ -16,8 +16,8 @@
 
 // If you change this file,
 // Please update ota_metadata_pb2.py by executing
-// protoc ota_metadata.proto --python_out $ANDROID_BUILD_TOP/build/tools/releasetools
-
+// protoc ota_metadata.proto --python_out
+// $ANDROID_BUILD_TOP/build/tools/releasetools
 
 syntax = "proto3";
 
@@ -65,6 +65,19 @@
   repeated PartitionState partition_state = 7;
 }
 
+message ApexInfo {
+  string package_name = 1;
+  int64 version = 2;
+  bool is_compressed = 3;
+  int64 decompressed_size = 4;
+}
+
+// Just a container to hold repeated apex_info, so that we can easily serialize
+// a list of apex_info to a string.
+message ApexMetadata {
+  repeated ApexInfo apex_info = 1;
+}
+
 // The metadata of an OTA package. It contains the information of the package
 // and prerequisite to install the update correctly.
 message OtaMetadata {
@@ -92,4 +105,7 @@
   bool retrofit_dynamic_partitions = 7;
   // The required size of the cache partition, only valid for non-A/B update.
   int64 required_cache = 8;
+
+  // True iff security patch level downgrade is permitted on this OTA.
+  bool spl_downgrade = 9;
 }
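
A rough usage sketch for the new messages, using the generated ota_metadata_pb2
bindings (the package name and sizes below are invented): a list of ApexInfo
entries is wrapped in ApexMetadata and serialized to a single blob such as
META/apex_info.pb.

import ota_metadata_pb2

apex = ota_metadata_pb2.ApexInfo(
    package_name='com.android.example.apex',  # hypothetical package
    version=2,
    is_compressed=True,
    decompressed_size=4096)
metadata = ota_metadata_pb2.ApexMetadata()
metadata.apex_info.extend([apex])
blob = metadata.SerializeToString()
# Round trip with ota_metadata_pb2.ApexMetadata.FromString(blob).
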
diff --git a/tools/releasetools/ota_metadata_pb2.py b/tools/releasetools/ota_metadata_pb2.py
index ff2b2c5..2552464 100644
--- a/tools/releasetools/ota_metadata_pb2.py
+++ b/tools/releasetools/ota_metadata_pb2.py
@@ -2,6 +2,8 @@
 # Generated by the protocol buffer compiler.  DO NOT EDIT!
 # source: ota_metadata.proto
 
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
 from google.protobuf import descriptor as _descriptor
 from google.protobuf import message as _message
 from google.protobuf import reflection as _reflection
@@ -17,8 +19,8 @@
   name='ota_metadata.proto',
   package='build.tools.releasetools',
   syntax='proto3',
-  serialized_options=b'H\003',
-  serialized_pb=b'\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"\xe1\x03\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42\x02H\x03\x62\x06proto3'
+  serialized_options=_b('H\003'),
+  serialized_pb=_b('\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"c\n\x08\x41pexInfo\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\x15\n\ris_compressed\x18\x03 \x01(\x08\x12\x19\n\x11\x64\x65\x63ompressed_size\x18\x04 \x01(\x03\"E\n\x0c\x41pexMetadata\x12\x35\n\tapex_info\x18\x01 \x03(\x0b\x32\".build.tools.releasetools.ApexInfo\"\xf8\x03\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x12\x15\n\rspl_downgrade\x18\t \x01(\x08\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42\x02H\x03\x62\x06proto3')
 )
 
 
@@ -48,8 +50,8 @@
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=777,
-  serialized_end=829,
+  serialized_start=972,
+  serialized_end=1024,
 )
 _sym_db.RegisterEnumDescriptor(_OTAMETADATA_OTATYPE)
 
@@ -64,7 +66,7 @@
     _descriptor.FieldDescriptor(
       name='partition_name', full_name='build.tools.releasetools.PartitionState.partition_name', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR),
@@ -85,7 +87,7 @@
     _descriptor.FieldDescriptor(
       name='version', full_name='build.tools.releasetools.PartitionState.version', index=3,
       number=4, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR),
@@ -130,7 +132,7 @@
     _descriptor.FieldDescriptor(
       name='build_incremental', full_name='build.tools.releasetools.DeviceState.build_incremental', index=2,
       number=3, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR),
@@ -144,14 +146,14 @@
     _descriptor.FieldDescriptor(
       name='sdk_level', full_name='build.tools.releasetools.DeviceState.sdk_level', index=4,
       number=5, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR),
     _descriptor.FieldDescriptor(
       name='security_patch_level', full_name='build.tools.releasetools.DeviceState.security_patch_level', index=5,
       number=6, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR),
@@ -179,6 +181,89 @@
 )
 
 
+_APEXINFO = _descriptor.Descriptor(
+  name='ApexInfo',
+  full_name='build.tools.releasetools.ApexInfo',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='package_name', full_name='build.tools.releasetools.ApexInfo.package_name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='version', full_name='build.tools.releasetools.ApexInfo.version', index=1,
+      number=2, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='is_compressed', full_name='build.tools.releasetools.ApexInfo.is_compressed', index=2,
+      number=3, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='decompressed_size', full_name='build.tools.releasetools.ApexInfo.decompressed_size', index=3,
+      number=4, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=347,
+  serialized_end=446,
+)
+
+
+_APEXMETADATA = _descriptor.Descriptor(
+  name='ApexMetadata',
+  full_name='build.tools.releasetools.ApexMetadata',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='apex_info', full_name='build.tools.releasetools.ApexMetadata.apex_info', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  serialized_options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=448,
+  serialized_end=517,
+)
+
+
 _OTAMETADATA_PROPERTYFILESENTRY = _descriptor.Descriptor(
   name='PropertyFilesEntry',
   full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry',
@@ -189,14 +274,14 @@
     _descriptor.FieldDescriptor(
       name='key', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.key', index=0,
       number=1, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR),
     _descriptor.FieldDescriptor(
       name='value', full_name='build.tools.releasetools.OtaMetadata.PropertyFilesEntry.value', index=1,
       number=2, type=9, cpp_type=9, label=1,
-      has_default_value=False, default_value=b"".decode('utf-8'),
+      has_default_value=False, default_value=_b("").decode('utf-8'),
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR),
@@ -206,14 +291,14 @@
   nested_types=[],
   enum_types=[
   ],
-  serialized_options=b'8\001',
+  serialized_options=_b('8\001'),
   is_extendable=False,
   syntax='proto3',
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=723,
-  serialized_end=775,
+  serialized_start=918,
+  serialized_end=970,
 )
 
 _OTAMETADATA = _descriptor.Descriptor(
@@ -279,6 +364,13 @@
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='spl_downgrade', full_name='build.tools.releasetools.OtaMetadata.spl_downgrade', index=8,
+      number=9, type=8, cpp_type=7, label=1,
+      has_default_value=False, default_value=False,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
   ],
   extensions=[
   ],
@@ -292,11 +384,12 @@
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=348,
-  serialized_end=829,
+  serialized_start=520,
+  serialized_end=1024,
 )
 
 _DEVICESTATE.fields_by_name['partition_state'].message_type = _PARTITIONSTATE
+_APEXMETADATA.fields_by_name['apex_info'].message_type = _APEXINFO
 _OTAMETADATA_PROPERTYFILESENTRY.containing_type = _OTAMETADATA
 _OTAMETADATA.fields_by_name['type'].enum_type = _OTAMETADATA_OTATYPE
 _OTAMETADATA.fields_by_name['property_files'].message_type = _OTAMETADATA_PROPERTYFILESENTRY
@@ -305,6 +398,8 @@
 _OTAMETADATA_OTATYPE.containing_type = _OTAMETADATA
 DESCRIPTOR.message_types_by_name['PartitionState'] = _PARTITIONSTATE
 DESCRIPTOR.message_types_by_name['DeviceState'] = _DEVICESTATE
+DESCRIPTOR.message_types_by_name['ApexInfo'] = _APEXINFO
+DESCRIPTOR.message_types_by_name['ApexMetadata'] = _APEXMETADATA
 DESCRIPTOR.message_types_by_name['OtaMetadata'] = _OTAMETADATA
 _sym_db.RegisterFileDescriptor(DESCRIPTOR)
 
@@ -322,6 +417,20 @@
   })
 _sym_db.RegisterMessage(DeviceState)
 
+ApexInfo = _reflection.GeneratedProtocolMessageType('ApexInfo', (_message.Message,), {
+  'DESCRIPTOR' : _APEXINFO,
+  '__module__' : 'ota_metadata_pb2'
+  # @@protoc_insertion_point(class_scope:build.tools.releasetools.ApexInfo)
+  })
+_sym_db.RegisterMessage(ApexInfo)
+
+ApexMetadata = _reflection.GeneratedProtocolMessageType('ApexMetadata', (_message.Message,), {
+  'DESCRIPTOR' : _APEXMETADATA,
+  '__module__' : 'ota_metadata_pb2'
+  # @@protoc_insertion_point(class_scope:build.tools.releasetools.ApexMetadata)
+  })
+_sym_db.RegisterMessage(ApexMetadata)
+
 OtaMetadata = _reflection.GeneratedProtocolMessageType('OtaMetadata', (_message.Message,), {
 
   'PropertyFilesEntry' : _reflection.GeneratedProtocolMessageType('PropertyFilesEntry', (_message.Message,), {
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 6bbcc92..104f02f 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -39,6 +39,8 @@
 METADATA_NAME = 'META-INF/com/android/metadata'
 METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'
 UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
+SECURITY_PATCH_LEVEL_PROP_NAME = "ro.build.version.security_patch"
+
 
 def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
   """Finalizes the metadata and signs an A/B OTA package.
@@ -168,7 +170,7 @@
     build_info_set = ComputeRuntimeBuildInfos(build_info,
                                               boot_variable_values)
     assert "ab_partitions" in build_info.info_dict,\
-      "ab_partitions property required for ab update."
+        "ab_partitions property required for ab update."
     ab_partitions = set(build_info.info_dict.get("ab_partitions"))
 
     # delta_generator will error out on unused timestamps,
@@ -317,6 +319,8 @@
     metadata_dict['pre-build'] = separator.join(pre_build.build)
     metadata_dict['pre-build-incremental'] = pre_build.build_incremental
 
+  if metadata_proto.spl_downgrade:
+    metadata_dict['spl-downgrade'] = 'yes'
   metadata_dict.update(metadata_proto.property_files)
 
   return metadata_dict
@@ -330,6 +334,9 @@
   pre_timestamp = source_info.GetBuildProp("ro.build.date.utc")
   is_downgrade = int(post_timestamp) < int(pre_timestamp)
 
+  if OPTIONS.spl_downgrade:
+    metadata_proto.spl_downgrade = True
+
   if OPTIONS.downgrade:
     if not is_downgrade:
       raise RuntimeError(
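
A short, illustrative sketch of the spl-downgrade plumbing above: setting the
proto field is what makes the legacy text metadata carry the matching
'spl-downgrade' key.

import ota_metadata_pb2

metadata_proto = ota_metadata_pb2.OtaMetadata()
metadata_proto.spl_downgrade = True

metadata_dict = {}
if metadata_proto.spl_downgrade:
  metadata_dict['spl-downgrade'] = 'yes'
assert metadata_dict == {'spl-downgrade': 'yes'}
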
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index e8674b6..00acd98 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -178,19 +178,31 @@
 
 
 AVB_FOOTER_ARGS_BY_PARTITION = {
-    'boot' : 'avb_boot_add_hash_footer_args',
-    'dtbo' : 'avb_dtbo_add_hash_footer_args',
-    'recovery' : 'avb_recovery_add_hash_footer_args',
-    'system' : 'avb_system_add_hashtree_footer_args',
-    'system_other' : 'avb_system_other_add_hashtree_footer_args',
-    'vendor' : 'avb_vendor_add_hashtree_footer_args',
-    'vendor_boot' : 'avb_vendor_boot_add_hash_footer_args',
-    'vbmeta' : 'avb_vbmeta_args',
-    'vbmeta_system' : 'avb_vbmeta_system_args',
-    'vbmeta_vendor' : 'avb_vbmeta_vendor_args',
+    'boot': 'avb_boot_add_hash_footer_args',
+    'dtbo': 'avb_dtbo_add_hash_footer_args',
+    'product': 'avb_product_add_hashtree_footer_args',
+    'recovery': 'avb_recovery_add_hash_footer_args',
+    'system': 'avb_system_add_hashtree_footer_args',
+    'system_ext': 'avb_system_ext_add_hashtree_footer_args',
+    'system_other': 'avb_system_other_add_hashtree_footer_args',
+    'odm': 'avb_odm_add_hashtree_footer_args',
+    'odm_dlkm': 'avb_odm_dlkm_add_hashtree_footer_args',
+    'pvmfw': 'avb_pvmfw_add_hash_footer_args',
+    'vendor': 'avb_vendor_add_hashtree_footer_args',
+    'vendor_boot': 'avb_vendor_boot_add_hash_footer_args',
+    'vendor_dlkm': "avb_vendor_dlkm_add_hashtree_footer_args",
+    'vbmeta': 'avb_vbmeta_args',
+    'vbmeta_system': 'avb_vbmeta_system_args',
+    'vbmeta_vendor': 'avb_vbmeta_vendor_args',
 }
 
 
+# Check that AVB_FOOTER_ARGS_BY_PARTITION is in sync with AVB_PARTITIONS.
+for partition in common.AVB_PARTITIONS:
+  if partition not in AVB_FOOTER_ARGS_BY_PARTITION:
+    raise RuntimeError("Missing {} in AVB_FOOTER_ARGS".format(partition))
+
+
 def GetApkCerts(certmap):
   # apply the key remapping to the contents of the file
   for apk, cert in certmap.items():
@@ -329,9 +341,8 @@
   """
   unknown_files = []
   for info in input_tf_zip.infolist():
-    # Handle APEXes first, e.g. SYSTEM/apex/com.android.tzdata.apex.
-    if (info.filename.startswith('SYSTEM/apex') and
-        info.filename.endswith('.apex')):
+    # Handle APEXes on all partitions
+    if info.filename.endswith('.apex'):
       name = os.path.basename(info.filename)
       if name not in known_keys:
         unknown_files.append(name)
@@ -363,8 +374,7 @@
 
   invalid_apexes = []
   for info in input_tf_zip.infolist():
-    if (not info.filename.startswith('SYSTEM/apex') or
-        not info.filename.endswith('.apex')):
+    if not info.filename.endswith('.apex'):
       continue
 
     name = os.path.basename(info.filename)
@@ -445,6 +455,25 @@
   return data
 
 
+def IsBuildPropFile(filename):
+  return filename in (
+        "SYSTEM/etc/prop.default",
+        "BOOT/RAMDISK/prop.default",
+        "RECOVERY/RAMDISK/prop.default",
+
+        "VENDOR_BOOT/RAMDISK/default.prop",
+        "VENDOR_BOOT/RAMDISK/prop.default",
+
+        # ROOT/default.prop is a legacy path, but may still exist for upgrading
+        # devices that don't support `property_overrides_split_enabled`.
+        "ROOT/default.prop",
+
+        # RECOVERY/RAMDISK/default.prop is a legacy path, but will always exist
+        # as a symlink in the current code. So it's a no-op here. Keeping the
+        # path here for clarity.
+        "RECOVERY/RAMDISK/default.prop") or filename.endswith("build.prop")
+
+
 def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
                        apk_keys, apex_keys, key_passwords,
                        platform_api_level, codename_to_api_level_map,
@@ -497,8 +526,8 @@
             "        (skipped due to special cert string)" % (name,))
         common.ZipWriteStr(output_tf_zip, out_info, data)
 
-    # Sign bundled APEX files.
-    elif filename.startswith("SYSTEM/apex") and filename.endswith(".apex"):
+    # Sign bundled APEX files on all partitions
+    elif filename.endswith(".apex"):
       name = os.path.basename(filename)
       payload_key, container_key = apex_keys[name]
 
@@ -528,39 +557,8 @@
             "        (skipped due to special cert string)" % (name,))
         common.ZipWriteStr(output_tf_zip, out_info, data)
 
-    # AVB public keys for the installed APEXes, which will be updated later.
-    elif (os.path.dirname(filename) == 'SYSTEM/etc/security/apex' and
-          filename != 'SYSTEM/etc/security/apex/'):
-      continue
-
     # System properties.
-    elif filename in (
-        "SYSTEM/build.prop",
-
-        "VENDOR/build.prop",
-        "SYSTEM/vendor/build.prop",
-
-        "ODM/etc/build.prop",
-        "VENDOR/odm/etc/build.prop",
-
-        "PRODUCT/build.prop",
-        "SYSTEM/product/build.prop",
-
-        "SYSTEM_EXT/build.prop",
-        "SYSTEM/system_ext/build.prop",
-
-        "SYSTEM/etc/prop.default",
-        "BOOT/RAMDISK/prop.default",
-        "RECOVERY/RAMDISK/prop.default",
-
-        # ROOT/default.prop is a legacy path, but may still exist for upgrading
-        # devices that don't support `property_overrides_split_enabled`.
-        "ROOT/default.prop",
-
-        # RECOVERY/RAMDISK/default.prop is a legacy path, but will always exist
-        # as a symlink in the current code. So it's a no-op here. Keeping the
-        # path here for clarity.
-        "RECOVERY/RAMDISK/default.prop"):
+    elif IsBuildPropFile(filename):
       print("Rewriting %s:" % (filename,))
       if stat.S_ISLNK(info.external_attr >> 16):
         new_data = data
@@ -588,12 +586,7 @@
 
     # Don't copy OTA certs if we're replacing them.
     # Replacement of update-payload-key.pub.pem was removed in b/116660991.
-    elif (
-        OPTIONS.replace_ota_keys and
-        filename in (
-            "BOOT/RAMDISK/system/etc/security/otacerts.zip",
-            "RECOVERY/RAMDISK/system/etc/security/otacerts.zip",
-            "SYSTEM/etc/security/otacerts.zip")):
+    elif OPTIONS.replace_ota_keys and filename.endswith("/otacerts.zip"):
       pass
 
     # Skip META/misc_info.txt since we will write back the new values later.
@@ -626,6 +619,10 @@
     elif filename in ["META/care_map.pb", "META/care_map.txt"]:
       pass
 
+    # Skip apex_info.pb because we sign/modify apexes
+    elif filename == "META/apex_info.pb":
+      pass
+
     # Updates system_other.avbpubkey in /product/etc/.
     elif filename in (
         "PRODUCT/etc/security/avb/system_other.avbpubkey",
@@ -857,21 +854,12 @@
     print("META/otakeys.txt has no keys; using %s for OTA package"
           " verification." % (mapped_keys[0],))
 
-  # recovery now uses the same x509.pem version of the keys.
-  # extra_recovery_keys are used only in recovery.
-  if misc_info.get("recovery_as_boot") == "true":
-    recovery_keys_location = "BOOT/RAMDISK/system/etc/security/otacerts.zip"
-  else:
-    recovery_keys_location = "RECOVERY/RAMDISK/system/etc/security/otacerts.zip"
-
-  WriteOtacerts(output_tf_zip, recovery_keys_location,
-                mapped_keys + extra_recovery_keys)
-
-  # SystemUpdateActivity uses the x509.pem version of the keys, but
-  # put into a zipfile system/etc/security/otacerts.zip.
-  # We DO NOT include the extra_recovery_keys (if any) here.
-  WriteOtacerts(output_tf_zip, "SYSTEM/etc/security/otacerts.zip", mapped_keys)
-
+  otacerts = [info
+              for info in input_tf_zip.infolist()
+              if info.filename.endswith("/otacerts.zip")]
+  for info in otacerts:
+    print("Rewriting OTA key:", info.filename, mapped_keys)
+    WriteOtacerts(output_tf_zip, info.filename, mapped_keys)
 
 
 def ReplaceVerityPublicKey(output_zip, filename, key_path):
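
Illustrative checks for the IsBuildPropFile helper introduced above (example
inputs only): anything ending in build.prop, plus the fixed prop.default and
default.prop paths, is treated as a property file to rewrite.

from sign_target_files_apks import IsBuildPropFile

assert IsBuildPropFile('SYSTEM/build.prop')
assert IsBuildPropFile('VENDOR/odm/etc/build.prop')
assert IsBuildPropFile('RECOVERY/RAMDISK/prop.default')
assert not IsBuildPropFile('SYSTEM/etc/security/otacerts.zip')
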
diff --git a/tools/releasetools/test_merge_target_files.py b/tools/releasetools/test_merge_target_files.py
index 7ea7f96..072bb01 100644
--- a/tools/releasetools/test_merge_target_files.py
+++ b/tools/releasetools/test_merge_target_files.py
@@ -18,12 +18,11 @@
 
 import common
 import test_utils
-from merge_target_files import (validate_config_lists,
-                                DEFAULT_FRAMEWORK_ITEM_LIST,
-                                DEFAULT_VENDOR_ITEM_LIST,
-                                DEFAULT_FRAMEWORK_MISC_INFO_KEYS, copy_items,
-                                item_list_to_partition_set,
-                                process_apex_keys_apk_certs_common)
+from merge_target_files import (
+    validate_config_lists, DEFAULT_FRAMEWORK_ITEM_LIST,
+    DEFAULT_VENDOR_ITEM_LIST, DEFAULT_FRAMEWORK_MISC_INFO_KEYS, copy_items,
+    item_list_to_partition_set, process_apex_keys_apk_certs_common,
+    compile_split_sepolicy)
 
 
 class MergeTargetFilesTest(test_utils.ReleaseToolsTestCase):
@@ -235,3 +234,43 @@
     ]
     partition_set = item_list_to_partition_set(item_list)
     self.assertEqual(set(['product', 'system', 'system_ext']), partition_set)
+
+  def test_compile_split_sepolicy(self):
+    product_out_dir = common.MakeTempDir()
+
+    def write_temp_file(path, data=''):
+      full_path = os.path.join(product_out_dir, path)
+      if not os.path.exists(os.path.dirname(full_path)):
+        os.makedirs(os.path.dirname(full_path))
+      with open(full_path, 'w') as f:
+        f.write(data)
+
+    write_temp_file(
+        'system/etc/vintf/compatibility_matrix.device.xml', """
+      <compatibility-matrix>
+        <sepolicy>
+          <kernel-sepolicy-version>30</kernel-sepolicy-version>
+        </sepolicy>
+      </compatibility-matrix>""")
+    write_temp_file('vendor/etc/selinux/plat_sepolicy_vers.txt', '30.0')
+
+    write_temp_file('system/etc/selinux/plat_sepolicy.cil')
+    write_temp_file('system/etc/selinux/mapping/30.0.cil')
+    write_temp_file('product/etc/selinux/mapping/30.0.cil')
+    write_temp_file('vendor/etc/selinux/vendor_sepolicy.cil')
+    write_temp_file('vendor/etc/selinux/plat_pub_versioned.cil')
+
+    cmd = compile_split_sepolicy(product_out_dir, {
+        'system': 'system',
+        'product': 'product',
+        'vendor': 'vendor',
+    }, os.path.join(product_out_dir, 'policy'))
+    self.assertEqual(' '.join(cmd),
+                     ('secilc -m -M true -G -N -c 30 '
+                      '-o {OTP}/policy -f /dev/null '
+                      '{OTP}/system/etc/selinux/plat_sepolicy.cil '
+                      '{OTP}/system/etc/selinux/mapping/30.0.cil '
+                      '{OTP}/vendor/etc/selinux/vendor_sepolicy.cil '
+                      '{OTP}/vendor/etc/selinux/plat_pub_versioned.cil '
+                      '{OTP}/product/etc/selinux/mapping/30.0.cil').format(
+                          OTP=product_out_dir))
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 8bf7778..9f64849 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -33,10 +33,11 @@
     GetTargetFilesZipWithoutPostinstallConfig,
     Payload, PayloadSigner, POSTINSTALL_CONFIG,
     StreamingPropertyFiles, AB_PARTITIONS)
+from apex_utils import GetSystemApexInfoFromTargetFiles
 from test_utils import PropertyFilesTestCase
 
 
-def construct_target_files(secondary=False):
+def construct_target_files(secondary=False, compressedApex=False):
   """Returns a target-files.zip file for generating OTA packages."""
   target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
   with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
@@ -78,6 +79,11 @@
       target_files_zip.writestr('IMAGES/system_other.img',
                                 os.urandom(len("system_other")))
 
+    if compressedApex:
+      apex_file_name = 'com.android.apex.compressed.v1.capex'
+      apex_file = os.path.join(test_utils.get_current_dir(), apex_file_name)
+      target_files_zip.write(apex_file, 'SYSTEM/apex/' + apex_file_name)
+
   return target_files
 
 
@@ -274,6 +280,21 @@
         },
         metadata)
 
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_GetSystemApexInfoFromTargetFiles(self):
+    target_files = construct_target_files(compressedApex=True)
+    apex_infos = GetSystemApexInfoFromTargetFiles(target_files)
+    self.assertEqual(len(apex_infos), 1)
+    self.assertEqual(apex_infos[0].package_name, "com.android.apex.compressed")
+    self.assertEqual(apex_infos[0].version, 1)
+    self.assertEqual(apex_infos[0].is_compressed, True)
+    # Compare the decompressed APEX size with the original uncompressed APEX
+    original_apex_name = 'com.android.apex.compressed.v1_original.apex'
+    original_apex_filepath = os.path.join(
+        test_utils.get_current_dir(), original_apex_name)
+    uncompressed_apex_size = os.path.getsize(original_apex_filepath)
+    self.assertEqual(apex_infos[0].decompressed_size, uncompressed_apex_size)
+
   def test_GetPackageMetadata_retrofitDynamicPartitions(self):
     target_info = common.BuildInfo(self.TEST_TARGET_INFO_DICT, None)
     common.OPTIONS.retrofit_dynamic_partitions = True
@@ -322,7 +343,10 @@
     common.OPTIONS.incremental_source = ''
     common.OPTIONS.downgrade = True
     common.OPTIONS.wipe_user_data = True
+    common.OPTIONS.spl_downgrade = True
     metadata = self.GetLegacyOtaMetadata(target_info, source_info)
+    # Reset spl_downgrade so other tests are unaffected
+    common.OPTIONS.spl_downgrade = False
 
     self.assertDictEqual(
         {
@@ -338,6 +362,7 @@
             'pre-device': 'product-device',
             'pre-build': 'build-fingerprint-source',
             'pre-build-incremental': 'build-version-incremental-source',
+            'spl-downgrade': 'yes',
         },
         metadata)
 
diff --git a/tools/releasetools/test_sign_apex.py b/tools/releasetools/test_sign_apex.py
index 82f5938..646b04d 100644
--- a/tools/releasetools/test_sign_apex.py
+++ b/tools/releasetools/test_sign_apex.py
@@ -57,3 +57,17 @@
         False,
         apk_keys)
     self.assertTrue(os.path.exists(signed_test_apex))
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_SignCompressedApexFile(self):
+    apex = os.path.join(test_utils.get_current_dir(), 'com.android.apex.compressed.v1.capex')
+    payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
+    container_key = os.path.join(self.testdata_dir, 'testkey')
+    signed_apex = sign_apex.SignApexFile(
+        'avbtool',
+        apex,
+        payload_key,
+        container_key,
+        False,
+        codename_to_api_level_map={'S': 31})
+    self.assertTrue(os.path.exists(signed_apex))
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index ccd97a9..808b392 100755
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -55,6 +55,11 @@
   current_dir = os.path.dirname(os.path.realpath(__file__))
   return os.path.join(current_dir, 'testdata')
 
+def get_current_dir():
+  """Returns the current dir, relative to the script dir."""
+  # The script dir is the one we want, which could be different from pwd.
+  current_dir = os.path.dirname(os.path.realpath(__file__))
+  return current_dir
 
 def get_search_path():
   """Returns the search path that has 'framework/signapk.jar' under."""
diff --git a/tools/releasetools/test_validate_target_files.py b/tools/releasetools/test_validate_target_files.py
index e9896ae..48b563d 100644
--- a/tools/releasetools/test_validate_target_files.py
+++ b/tools/releasetools/test_validate_target_files.py
@@ -357,9 +357,6 @@
         'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
         'ro.product.odm.device=coral',
     ]
-    input_tmp = ValidateTargetFilesTest.make_build_prop({
-        'ODM/etc/build.prop': '\n'.join(build_prop),
-    })
+    input_tmp = ValidateTargetFilesTest.make_build_prop(build_prop)
 
-    self.assertRaises(ValueError, CheckBuildPropDuplicity,
-                        input_tmp)
+    self.assertRaises(ValueError, CheckBuildPropDuplicity, input_tmp)
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index d2178b2..401857f 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -236,6 +236,7 @@
 
   logging.info('Done checking %s', script_path)
 
+
 # Symlink files in `src` to `dst`, if the files do not
 # already exist in the `dst` directory.
 def symlinkIfNotExists(src, dst):
@@ -246,6 +247,7 @@
       continue
     os.symlink(os.path.join(src, filename), os.path.join(dst, filename))
 
+
 def ValidateVerifiedBootImages(input_tmp, info_dict, options):
   """Validates the Verified Boot related images.
 
@@ -423,16 +425,25 @@
           'Verified %s with avbtool (key: %s):\n%s', image, key,
           stdoutdata.rstrip())
 
-def CheckDataDuplicity(lines):
+
+def CheckDataInconsistency(lines):
     build_prop = {}
     for line in lines:
       if line.startswith("import") or line.startswith("#"):
         continue
-      key, value = line.split("=", 1)
+      if "=" not in line:
+        continue
+
+      key, value = line.rstrip().split("=", 1)
       if key in build_prop:
-        return key
+        logging.info("Duplicated key found for {}".format(key))
+        if value != build_prop[key]:
+          logging.error("Key {} is defined twice with different values {} vs {}"
+                        .format(key, value, build_prop[key]))
+          return key
       build_prop[key] = value
 
+
 def CheckBuildPropDuplicity(input_tmp):
   """Check all buld.prop files inside directory input_tmp, raise error
   if they contain duplicates"""
@@ -448,9 +459,11 @@
         continue
       logging.info("Checking {}".format(path))
       with open(path, 'r') as fp:
-        dupKey = CheckDataDuplicity(fp.readlines())
+        dupKey = CheckDataInconsistency(fp.readlines())
         if dupKey:
-          raise ValueError("{} contains duplicate keys for {}", path, dupKey)
+          raise ValueError("{} contains duplicate keys for {}".format(
+              path, dupKey))
+
 
 def main():
   parser = argparse.ArgumentParser(
diff --git a/tools/signapk/Android.bp b/tools/signapk/Android.bp
index b90f010..bee6a6f 100644
--- a/tools/signapk/Android.bp
+++ b/tools/signapk/Android.bp
@@ -16,6 +16,10 @@
 
 // the signapk tool (a .jar application used to sign packages)
 // ============================================================
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
 java_binary_host {
     name: "signapk",
     srcs: ["src/**/*.java"],
diff --git a/tools/signtos/Android.bp b/tools/signtos/Android.bp
index 688e7b8..cd41acc 100644
--- a/tools/signtos/Android.bp
+++ b/tools/signtos/Android.bp
@@ -16,6 +16,10 @@
 
 // the signtos tool - signs Trusty images
 // ============================================================
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
 java_library_host {
     name: "signtos",
     srcs: ["SignTos.java"],
diff --git a/tools/test_extract_kernel.py b/tools/test_extract_kernel.py
index 1a1cfcb..002e387 100644
--- a/tools/test_extract_kernel.py
+++ b/tools/test_extract_kernel.py
@@ -15,16 +15,16 @@
 # limitations under the License.
 
 import unittest
-from extract_kernel import get_version, dump_version
+from extract_kernel import dump_version
 
 class ExtractKernelTest(unittest.TestCase):
   def test_extract_version(self):
-    self.assertEqual("4.9.100", get_version(
-        b'Linux version 4.9.100-a123 (a@a) (a) a\n\x00', 0))
-    self.assertEqual("4.9.123", get_version(
-        b'Linux version 4.9.123 (@) () \n\x00', 0))
+    self.assertEqual("4.9.100", dump_version(
+        b'Linux version 4.9.100-a123 (a@a) (a) a\n\x00'))
+    self.assertEqual("4.9.123", dump_version(
+        b'Linux version 4.9.123 (@) () \n\x00'))
 
   def test_dump_self(self):
     self.assertEqual("4.9.1", dump_version(
         b"trash\x00Linux version 4.8.8\x00trash\x00"
-        "other trash Linux version 4.9.1-g3 (2@s) (2) a\n\x00"))
+        b"other trash Linux version 4.9.1-g3 (2@s) (2) a\n\x00"))
diff --git a/tools/zipalign/Android.bp b/tools/zipalign/Android.bp
index 1ebf4eb..8cab04c 100644
--- a/tools/zipalign/Android.bp
+++ b/tools/zipalign/Android.bp
@@ -4,6 +4,10 @@
 // Zip alignment tool
 //
 
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
 cc_defaults {
     name: "zipalign_defaults",
     target: {
diff --git a/tools/ziptime/Android.bp b/tools/ziptime/Android.bp
index 5ef45ed..fa46b30 100644
--- a/tools/ziptime/Android.bp
+++ b/tools/ziptime/Android.bp
@@ -18,6 +18,10 @@
 // Zip timestamp removal tool
 //
 
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
 cc_binary_host {
 
     srcs: [