Merge "Add libconnectivity_native to LLNDK"
diff --git a/.gitignore b/.gitignore
index f1f4a52..54c90ed 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,4 @@
+*.iml
 *.pyc
 *.swp
 blueprint/
diff --git a/Changes.md b/Changes.md
index cabbed6..3ad2641 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,88 @@
 # Build System Changes for Android.mk Writers
 
+## Stop referencing sysprop_library directly from cc modules
+
+For the migration to Bazel, we are no longer mapping sysprop_library targets
+to their generated `cc_library` counterparts when depending on them from a
+cc module. Instead, directly depend on the generated module by prefixing the
+module name with `lib`. For example, depending on the following module:
+
+```
+sysprop_library {
+    name: "foo",
+    srcs: ["foo.sysprop"],
+}
+```
+
+from a module named `bar` can be done like so:
+
+```
+cc_library {
+    name: "bar",
+    srcs: ["bar.cc"],
+    shared_libs: ["libfoo"],
+}
+```
+
+Failure to do this will result in an error about a missing variant.
+
+## Gensrcs starts disallowing depfile property
+
+To migrate all gensrcs to Bazel, we are restricting the use of the `depfile` property
+because Bazel requires specifying the dependencies directly.
+
+To fix existing uses, remove `depfile` and directly specify all the dependencies
+in .bp files. For example:
+
+```
+gensrcs {
+    name: "framework-cppstream-protos",
+    tools: [
+        "aprotoc",
+        "protoc-gen-cppstream",
+    ],
+    cmd: "mkdir -p $(genDir)/$(in) " +
+        "&& $(location aprotoc) " +
+        "  --plugin=$(location protoc-gen-cppstream) " +
+        "  -I . " +
+        "  $(in) ",
+    srcs: [
+        "bar.proto",
+    ],
+    output_extension: "srcjar",
+}
+```
+where `bar.proto` imports `external.proto`, the module would become
+
+```
+gensrcs {
+    name: "framework-cppstream-protos",
+    tools: [
+        "aprotoc",
+        "protoc-gen-cpptream",
+    ],
+    tool_files: [
+        "external.proto",
+    ],
+    cmd: "mkdir -p $(genDir)/$(in) " +
+        "&& $(location aprotoc) " +
+        "  --plugin=$(location protoc-gen-cppstream) " +
+        "  $(in) ",
+    srcs: [
+        "bar.proto",
+    ],
+    output_extension: "srcjar",
+}
+```
+as in https://android-review.googlesource.com/c/platform/frameworks/base/+/2125692/.
+
+`BUILD_BROKEN_DEPFILE` can be used to allowlist usage of `depfile` in `gensrcs`.
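+
+As a sketch of that escape hatch (assuming the flag is set in your board
+configuration, e.g. a `BoardConfig.mk`):
+
+``` make
+BUILD_BROKEN_DEPFILE := true
+```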
+
+If `depfile` is needed for generating javastream protos, a `java_library` with
+`proto.type` set to `stream` is the alternative solution. See
+https://android-review.googlesource.com/c/platform/packages/modules/Permission/+/2118004/
+for an example.
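+
+For instance, a minimal sketch of such a module (the module and source names
+here are hypothetical):
+
+```
+java_library {
+    name: "bar-javastream-protos",
+    srcs: ["bar.proto"],
+    proto: {
+        type: "stream",
+    },
+}
+```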
+
 ## Genrule starts disallowing directory inputs
 
 To better specify the inputs to the build, we are restricting use of directories
@@ -733,6 +816,38 @@
 Clang is the default and only supported Android compiler, so there is no reason
 for this option to exist.
 
+### Stop using clang property
+
+The `clang` property has been deleted from Soong. To fix any build errors, remove the
+`clang` property from affected `Android.bp` files using bpmodify:
+
+
+``` make
+go run bpmodify.go -w -m=module_name -remove-property=true -property=clang filepath
+```
+
+`BUILD_BROKEN_CLANG_PROPERTY` can be used as a temporary workaround.
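+
+As with `BUILD_BROKEN_DEPFILE` above, this is a sketch assuming the flag is
+set in your board configuration (e.g. `BoardConfig.mk`):
+
+``` make
+BUILD_BROKEN_CLANG_PROPERTY := true
+```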
+
+
+### Stop using clang_cflags and clang_asflags
+
+`clang_cflags` and `clang_asflags` are deprecated. To fix any build errors,
+use bpmodify to either:
+
+- move the contents of `clang_cflags`/`clang_asflags` into `cflags`/`asflags`, or
+- delete `clang_cflags`/`clang_asflags` as necessary.
+
+To move the contents:
+``` make
+go run bpmodify.go -w -m=module_name -move-property=true -property=clang_cflags -new-location=cflags filepath
+```
+
+To delete:
+``` make
+go run bpmodify.go -w -m=module_name -remove-property=true -property=clang_cflags filepath
+```
+
+`BUILD_BROKEN_CLANG_ASFLAGS` and `BUILD_BROKEN_CLANG_CFLAGS` can be used as temporary workarounds.
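+
+Again a sketch, assuming a board configuration file such as `BoardConfig.mk`:
+
+``` make
+BUILD_BROKEN_CLANG_ASFLAGS := true
+BUILD_BROKEN_CLANG_CFLAGS := true
+```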
+
 ### Other envsetup.sh variables  {#other_envsetup_variables}
 
 * ANDROID_TOOLCHAIN
diff --git a/OWNERS b/OWNERS
index 4cac0f5..8a1cc34 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1 +1,4 @@
 include platform/build/soong:/OWNERS
+
+# Finalization scripts
+per-file finalize* = smoreland@google.com, alexbuy@google.com
diff --git a/core/BUILD b/core/BUILD
new file mode 100644
index 0000000..3e69e62
--- /dev/null
+++ b/core/BUILD
@@ -0,0 +1,4 @@
+# Export tradefed templates for tests.
+exports_files(
+    glob(["*.xml"]),
+)
diff --git a/core/Makefile b/core/Makefile
index e9bca77..dc8856b 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -7,6 +7,7 @@
 SYSTEM_NOTICE_DEPS :=
 VENDOR_NOTICE_DEPS :=
 UNMOUNTED_NOTICE_DEPS :=
+UNMOUNTED_NOTICE_VENDOR_DEPS :=
 ODM_NOTICE_DEPS :=
 OEM_NOTICE_DEPS :=
 PRODUCT_NOTICE_DEPS :=
@@ -94,6 +95,8 @@
 $(pcf_ignored_file):
 	echo "$(PRIVATE_IGNORED)" | tr " " "\n" >$@
 
+$(call declare-0p-target,$(pcf_ignored_file))
+
 $(call dist-for-goals,droidcore-unbundled,$(pcf_ignored_file):logs/$(notdir $(pcf_ignored_file)))
 
 pcf_ignored_file :=
@@ -168,6 +171,8 @@
 	    echo "HTML_OUTPUT=$(ndk_doxygen_out)" \
 	) | doxygen -
 
+$(call declare-1p-target,$(ndk_doxygen_out)/index.html,)
+
 # Note: Not a part of the docs target because we don't have doxygen available.
 # You can run this target locally if you have doxygen installed.
 ndk-docs: $(ndk_doxygen_out)/index.html
@@ -223,6 +228,8 @@
 	            echo "$$x"generic >> $@.tmp; done
 	$(hide) mv $@.tmp $@
 
+$(call declare-0p-target,$(INSTALLED_SDK_BUILD_PROP_TARGET))
+
 # -----------------------------------------------------------------
 # declare recovery ramdisk files
 ifeq ($(BUILDING_RECOVERY_IMAGE),true)
@@ -593,11 +600,15 @@
 	    $(if $(PACKAGES.$(p).EXTERNAL_KEY),\
 	      $(call _apkcerts_write_line,$(PACKAGES.$(p).STEM),EXTERNAL,,$(PACKAGES.$(p).COMPRESSED),$(PACKAGES.$(p).PARTITION),$@),\
 	      $(call _apkcerts_write_line,$(PACKAGES.$(p).STEM),$(PACKAGES.$(p).CERTIFICATE),$(PACKAGES.$(p).PRIVATE_KEY),$(PACKAGES.$(p).COMPRESSED),$(PACKAGES.$(p).PARTITION),$@))))
-	$(if $(filter true,$(PRODUCT_SYSTEM_FSVERITY_GENERATE_METADATA)),\
-	  $(call _apkcerts_write_line,$(notdir $(basename $(FSVERITY_APK_OUT))),$(FSVERITY_APK_KEY_PATH).x509.pem,$(FSVERITY_APK_KEY_PATH).pk8,,system,$@))
+	$(if $(filter true,$(PRODUCT_FSVERITY_GENERATE_METADATA)),\
+	  $(call _apkcerts_write_line,BuildManifest,$(FSVERITY_APK_KEY_PATH).x509.pem,$(FSVERITY_APK_KEY_PATH).pk8,,system,$@) \
+	  $(if $(filter true,$(BUILDING_SYSTEM_EXT_IMAGE)),\
+            $(call _apkcerts_write_line,BuildManifestSystemExt,$(FSVERITY_APK_KEY_PATH).x509.pem,$(FSVERITY_APK_KEY_PATH).pk8,,system_ext,$@)))
 	# In case value of PACKAGES is empty.
 	$(hide) touch $@
 
+$(call declare-0p-target,$(APKCERTS_FILE))
+
 .PHONY: apkcerts-list
 apkcerts-list: $(APKCERTS_FILE)
 
@@ -614,6 +625,7 @@
 	@rm -f $@
 	@$(foreach s,$(STATS.MODULE_TYPE),echo "modules_type_make,$(s),$(words $(STATS.MODULE_TYPE.$(s)))" >>$@;)
 	@$(foreach s,$(STATS.SOONG_MODULE_TYPE),echo "modules_type_soong,$(s),$(STATS.SOONG_MODULE_TYPE.$(s))" >>$@;)
+$(call declare-1p-target,$(BUILD_SYSTEM_STATS),build)
 $(call dist-for-goals,droidcore-unbundled,$(BUILD_SYSTEM_STATS))
 
 # -----------------------------------------------------------------
@@ -634,35 +646,48 @@
 	@rm -f $@
 	@$(foreach s,$(SOONG_CONV),echo "$(s),$(SOONG_CONV.$(s).TYPE),$(sort $(SOONG_CONV.$(s).PROBLEMS)),$(sort $(filter-out $(SOONG_ALREADY_CONV),$(SOONG_CONV.$(s).DEPS))),$(sort $(SOONG_CONV.$(s).MAKEFILES)),$(sort $(SOONG_CONV.$(s).INSTALLED))" >>$@;)
 
+$(call declare-1p-target,$(SOONG_CONV_DATA),build)
+
 SOONG_TO_CONVERT_SCRIPT := build/make/tools/soong_to_convert.py
 SOONG_TO_CONVERT := $(PRODUCT_OUT)/soong_to_convert.txt
 $(SOONG_TO_CONVERT): $(SOONG_CONV_DATA) $(SOONG_TO_CONVERT_SCRIPT)
 	@rm -f $@
 	$(hide) $(SOONG_TO_CONVERT_SCRIPT) $< >$@
+$(call declare-1p-target,$(SOONG_TO_CONVERT),build)
 $(call dist-for-goals,droidcore-unbundled,$(SOONG_TO_CONVERT))
 
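+# Lists all packages installed for the current product, consumed by the
+# mk2bp catalog rules below.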
+$(PRODUCT_OUT)/product_packages.txt:
+	@rm -f $@
+	echo "" > $@
+	$(foreach x,$(PRODUCT_PACKAGES),echo $(x) >> $@$(newline))
+
 MK2BP_CATALOG_SCRIPT := build/make/tools/mk2bp_catalog.py
+PRODUCT_PACKAGES_TXT := $(PRODUCT_OUT)/product_packages.txt
 MK2BP_REMAINING_HTML := $(PRODUCT_OUT)/mk2bp_remaining.html
 $(MK2BP_REMAINING_HTML): PRIVATE_CODE_SEARCH_BASE_URL := "https://cs.android.com/android/platform/superproject/+/master:"
-$(MK2BP_REMAINING_HTML): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT)
+$(MK2BP_REMAINING_HTML): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT) $(PRODUCT_PACKAGES_TXT)
 	@rm -f $@
 	$(hide) $(MK2BP_CATALOG_SCRIPT) \
 		--device=$(TARGET_DEVICE) \
+		--product-packages=$(PRODUCT_PACKAGES_TXT) \
 		--title="Remaining Android.mk files for $(TARGET_DEVICE)-$(TARGET_BUILD_VARIANT)" \
 		--codesearch=$(PRIVATE_CODE_SEARCH_BASE_URL) \
-		--out_dir="$(OUT_DIR)" \
+		--out-dir="$(OUT_DIR)" \
 		--mode=html \
 		> $@
+$(call declare-1p-target,$(MK2BP_REMAINING_HTML),build)
 $(call dist-for-goals,droidcore-unbundled,$(MK2BP_REMAINING_HTML))
 
 MK2BP_REMAINING_CSV := $(PRODUCT_OUT)/mk2bp_remaining.csv
-$(MK2BP_REMAINING_CSV): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT)
+$(MK2BP_REMAINING_CSV): $(SOONG_CONV_DATA) $(MK2BP_CATALOG_SCRIPT) $(PRODUCT_PACKAGES_TXT)
 	@rm -f $@
 	$(hide) $(MK2BP_CATALOG_SCRIPT) \
 		--device=$(TARGET_DEVICE) \
-		--out_dir="$(OUT_DIR)" \
+		--product-packages=$(PRODUCT_PACKAGES_TXT) \
+		--out-dir="$(OUT_DIR)" \
 		--mode=csv \
 		> $@
+$(call declare-1p-target,$(MK2BP_REMAINING_CSV))
 $(call dist-for-goals,droidcore-unbundled,$(MK2BP_REMAINING_CSV))
 
 # -----------------------------------------------------------------
@@ -672,8 +697,10 @@
 	@rm -f $@
 	echo "# Modules using -Wno-error" >> $@
 	for m in $(sort $(SOONG_MODULES_USING_WNO_ERROR) $(MODULES_USING_WNO_ERROR)); do echo $$m >> $@; done
-	echo "# Modules added default -Wall" >> $@
-	for m in $(sort $(SOONG_MODULES_ADDED_WALL) $(MODULES_ADDED_WALL)); do echo $$m >> $@; done
+	echo "# Modules that allow warnings" >> $@
+	for m in $(sort $(SOONG_MODULES_WARNINGS_ALLOWED) $(MODULES_WARNINGS_ALLOWED)); do echo $$m >> $@; done
+
+$(call declare-0p-target,$(WALL_WERROR))
 
 $(call dist-for-goals,droidcore-unbundled,$(WALL_WERROR))
 
@@ -681,6 +708,8 @@
 # C/C++ flag information for modules
 $(call dist-for-goals,droidcore-unbundled,$(SOONG_MODULES_CFLAG_ARTIFACTS))
 
+$(foreach a,$(SOONG_MODULES_CFLAG_ARTIFACTS),$(call declare-0p-target,$(call word-colon,1,$(a))))
+
 # -----------------------------------------------------------------
 # Modules missing profile files
 PGO_PROFILE_MISSING := $(PRODUCT_OUT)/pgo_profile_file_missing.txt
@@ -689,12 +718,15 @@
 	echo "# Modules missing PGO profile files" >> $@
 	for m in $(SOONG_MODULES_MISSING_PGO_PROFILE_FILE); do echo $$m >> $@; done
 
+$(call declare-0p-target,$(PGO_PROFILE_MISSING))
+
 $(call dist-for-goals,droidcore,$(PGO_PROFILE_MISSING))
 
 CERTIFICATE_VIOLATION_MODULES_FILENAME := $(PRODUCT_OUT)/certificate_violation_modules.txt
 $(CERTIFICATE_VIOLATION_MODULES_FILENAME):
 	rm -f $@
 	$(foreach m,$(sort $(CERTIFICATE_VIOLATION_MODULES)), echo $(m) >> $@;)
+$(call declare-0p-target,$(CERTIFICATE_VIOLATION_MODULES_FILENAME))
 $(call dist-for-goals,droidcore,$(CERTIFICATE_VIOLATION_MODULES_FILENAME))
 
 # -----------------------------------------------------------------
@@ -731,6 +763,8 @@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(MERGETAGS) -o $@ $(PRIVATE_SRC_FILES)
 
+$(call declare-0p-target,$(all_event_log_tags_file))
+
 # Include tags from all packages included in this product, plus all
 # tags that are part of the system (ie, not in a vendor/ or device/
 # directory).
@@ -809,10 +843,7 @@
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 $(call declare-0p-target,$(INSTALLED_FILES_FILE_ROOT))
-
-ifeq ($(HOST_OS),linux)
-$(call dist-for-goals, sdk sdk_addon, $(INSTALLED_FILES_FILE_ROOT))
-endif
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_ROOT))
 
 #------------------------------------------------------------------
 # dtb
@@ -843,10 +874,8 @@
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 $(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_RAMDISK)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON_RAMDISK)))
 
-ifeq ($(HOST_OS),linux)
-$(call dist-for-goals, sdk sdk_addon, $(INSTALLED_FILES_FILE_RAMDISK))
-endif
 BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img
 
 ifeq ($(BOARD_RAMDISK_USE_LZ4),true)
@@ -874,7 +903,7 @@
 $(call declare-1p-container,$(INSTALLED_RAMDISK_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_RAMDISK_TARGET),$(INTERNAL_RAMDISK_FILE),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_RAMDISK_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_RAMDISK_TARGET)
 
 .PHONY: ramdisk-nodeps
 ramdisk-nodeps: $(MKBOOTFS) | $(COMPRESSION_COMMAND_DEPS)
@@ -909,6 +938,7 @@
   my_apex_extracted_boot_image := $(ALL_MODULES.$(my_installed_prebuilt_gki_apex).EXTRACTED_BOOT_IMAGE)
   INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
   $(eval $(call copy-one-file,$(my_apex_extracted_boot_image),$(INSTALLED_BOOTIMAGE_TARGET)))
+  $(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
 
   INTERNAL_PREBUILT_BOOTIMAGE := $(my_apex_extracted_boot_image)
 
@@ -937,17 +967,16 @@
   $(if $(1),--partition_size $(1),--dynamic_partition_size)
 endef
 
+ifndef BOARD_PREBUILT_BOOTIMAGE
+
 ifneq ($(strip $(TARGET_NO_KERNEL)),true)
 INTERNAL_BOOTIMAGE_ARGS := \
 	$(addprefix --second ,$(INSTALLED_2NDBOOTLOADER_TARGET))
 
-INTERNAL_INIT_BOOT_IMAGE_ARGS :=
-
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+# TODO(b/229701033): clean up BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK.
+ifneq ($(BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK),true)
   ifneq ($(BUILDING_INIT_BOOT_IMAGE),true)
     INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
-  else
-    INTERNAL_INIT_BOOT_IMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
   endif
 endif
 
@@ -959,15 +988,6 @@
 
 INTERNAL_BOOTIMAGE_FILES := $(filter-out --%,$(INTERNAL_BOOTIMAGE_ARGS))
 
-ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-VERITY_KEYID := veritykeyid=id:`openssl x509 -in $(PRODUCT_VERITY_SIGNING_KEY).x509.pem -text \
-                | grep keyid | sed 's/://g' | tr -d '[:space:]' | tr '[:upper:]' '[:lower:]' | sed 's/keyid//g'`
-endif
-endif
-
-INTERNAL_KERNEL_CMDLINE := $(strip $(INTERNAL_KERNEL_CMDLINE) buildvariant=$(TARGET_BUILD_VARIANT) $(VERITY_KEYID))
-
 # kernel cmdline/base/pagesize in boot.
 # - If using GKI, use GENERIC_KERNEL_CMDLINE. Remove kernel base and pagesize because they are
 #   device-specific.
@@ -1073,40 +1093,17 @@
 	$(call pretty,"Target boot image: $@")
 	$(call build_boot_board_avb_enabled,$@)
 
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
 $(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES) $(INTERNAL_GKI_CERTIFICATE_DEPS),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
 
 .PHONY: bootimage-nodeps
 bootimage-nodeps: $(MKBOOTIMG) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH) $(INTERNAL_GKI_CERTIFICATE_DEPS)
 	@echo "make $@: ignoring dependencies"
 	$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_board_avb_enabled,$(b)))
 
-else ifeq (true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)) # BOARD_AVB_ENABLE != true
-
-# $1: boot image target
-define build_boot_supports_boot_signer
-  $(MKBOOTIMG) --kernel $(call bootimage-to-kernel,$(1)) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1)
-  $(BOOT_SIGNER) /boot $@ $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1)
-  $(call assert-max-image-size,$(1),$(call get-bootimage-partition-size,$(1),boot))
-endef
-
-$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER)
-	$(call pretty,"Target boot image: $@")
-	$(call build_boot_supports_boot_signer,$@)
-
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
-$(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES),$(PRODUCT_OUT)/:/)
-
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
-
-.PHONY: bootimage-nodeps
-bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER)
-	@echo "make $@: ignoring dependencies"
-	$(foreach b,$(INSTALLED_BOOTIMAGE_TARGET),$(call build_boot_supports_boot_signer,$(b)))
-
-else ifeq (true,$(PRODUCT_SUPPORTS_VBOOT)) # PRODUCT_SUPPORTS_BOOT_SIGNER != true
+else ifeq (true,$(PRODUCT_SUPPORTS_VBOOT)) # BOARD_AVB_ENABLE != true
 
 # $1: boot image target
 define build_boot_supports_vboot
@@ -1119,10 +1116,10 @@
 	$(call pretty,"Target boot image: $@")
 	$(call build_boot_supports_vboot,$@)
 
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
 $(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
 
 .PHONY: bootimage-nodeps
 bootimage-nodeps: $(MKBOOTIMG) $(VBOOT_SIGNER) $(FUTILITY)
@@ -1141,10 +1138,10 @@
 	$(call pretty,"Target boot image: $@")
 	$(call build_boot_novboot,$@)
 
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
 $(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_BOOTIMAGE_FILES),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
 
 .PHONY: bootimage-nodeps
 bootimage-nodeps: $(MKBOOTIMG)
@@ -1155,7 +1152,10 @@
 endif # BUILDING_BOOT_IMAGE
 
 else # TARGET_NO_KERNEL == "true"
-ifdef BOARD_PREBUILT_BOOTIMAGE
+INSTALLED_BOOTIMAGE_TARGET :=
+endif # TARGET_NO_KERNEL
+
+else # BOARD_PREBUILT_BOOTIMAGE defined
 INTERNAL_PREBUILT_BOOTIMAGE := $(BOARD_PREBUILT_BOOTIMAGE)
 INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
 
@@ -1168,19 +1168,17 @@
 	    --partition_name boot $(INTERNAL_AVB_BOOT_SIGNING_ARGS) \
 	    $(BOARD_AVB_BOOT_ADD_HASH_FOOTER_ARGS)
 
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
 $(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(INTERNAL_PREBUILT_BOOTIMAGE),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
 else
 $(INSTALLED_BOOTIMAGE_TARGET): $(INTERNAL_PREBUILT_BOOTIMAGE)
 	cp $(INTERNAL_PREBUILT_BOOTIMAGE) $@
 endif # BOARD_AVB_ENABLE
 
-else # BOARD_PREBUILT_BOOTIMAGE not defined
-INSTALLED_BOOTIMAGE_TARGET :=
 endif # BOARD_PREBUILT_BOOTIMAGE
-endif # TARGET_NO_KERNEL
+
 endif # my_installed_prebuilt_gki_apex not defined
 
 my_apex_extracted_boot_image :=
@@ -1193,6 +1191,8 @@
 INSTALLED_INIT_BOOT_IMAGE_TARGET := $(PRODUCT_OUT)/init_boot.img
 $(INSTALLED_INIT_BOOT_IMAGE_TARGET): $(MKBOOTIMG) $(INSTALLED_RAMDISK_TARGET)
 
+INTERNAL_INIT_BOOT_IMAGE_ARGS := --ramdisk $(INSTALLED_RAMDISK_TARGET)
+
 ifdef BOARD_KERNEL_PAGESIZE
   INTERNAL_INIT_BOOT_IMAGE_ARGS += --pagesize $(BOARD_KERNEL_PAGESIZE)
 endif
@@ -1219,7 +1219,7 @@
 $(call declare-1p-target,$(INSTALLED_INIT_BOOT_IMAGE_TARGET),)
 endif
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_INIT_BOOT_IMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_INIT_BOOT_IMAGE_TARGET)
 
 else # BUILDING_INIT_BOOT_IMAGE is not true
 
@@ -1245,7 +1245,7 @@
 $(call declare-1p-target,$(INSTALLED_INIT_BOOT_IMAGE_TARGET),)
 endif # BOARD_AVB_ENABLE
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_INIT_BOOT_IMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_INIT_BOOT_IMAGE_TARGET)
 
 else # BOARD_PREBUILT_INIT_BOOT_IMAGE not defined
 INSTALLED_INIT_BOOT_IMAGE_TARGET :=
@@ -1258,10 +1258,6 @@
 INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_VENDOR_RAMDISK_OUT)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
 ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
 
-ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
-  $(error vboot 1.0 does not support vendor_boot partition)
-endif
-
 INTERNAL_VENDOR_RAMDISK_FILES := $(filter $(TARGET_VENDOR_RAMDISK_OUT)/%, \
     $(ALL_DEFAULT_INSTALLED_MODULES))
 
@@ -1301,6 +1297,7 @@
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 $(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_RAMDISK)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON_VENDOR_RAMDISK)))
 
 ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
   ifneq ($(BUILDING_VENDOR_KERNEL_BOOT_IMAGE),true)
@@ -1421,6 +1418,9 @@
 	$(FILESLIST) $(TARGET_VENDOR_KERNEL_RAMDISK_OUT) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_KERNEL_RAMDISK))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_VENDOR_KERNEL_RAMDISK))
+
 INTERNAL_VENDOR_KERNEL_BOOTIMAGE_ARGS := --vendor_ramdisk $(INTERNAL_VENDOR_KERNEL_RAMDISK_TARGET)
 INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/vendor_kernel_boot.img
 $(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_VENDOR_KERNEL_RAMDISK_TARGET)
@@ -1450,6 +1450,14 @@
 	$(MKBOOTIMG) $(INTERNAL_VENDOR_KERNEL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --vendor_boot $@
 	$(call assert-max-image-size,$@,$(BOARD_VENDOR_KERNEL_BOOTIMAGE_PARTITION_SIZE))
 endif
+$(call declare-1p-container,$(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET),)
+ifdef BOARD_INCLUDE_DTB_IN_BOOTIMG
+$(call declare-container-license-deps,$(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET),\
+    $(INTERNAL_VENDOR_KERNEL_RAMDISK_TARGET) $(INSTALLED_DTBIMAGE_TARGET),\
+    $(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET):)
+else
+$(call declare-container-license-deps,$(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET),$(INTERNAL_VENDOR_KERNEL_RAMDISK_TARGET),$(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET):)
+endif
 endif # BUILDING_VENDOR_KERNEL_BOOT_IMAGE
 
 # -----------------------------------------------------------------
@@ -1476,8 +1484,9 @@
 define xml-notice-rule
 $(1): PRIVATE_PRODUCT := $(2)
 $(1): PRIVATE_MESSAGE := $(3)
+$(1): PRIVATE_DEPS := $(call corresponding-license-metadata,$(4))
 $(1): $(call corresponding-license-metadata,$(4)) $(XMLNOTICE) $(BUILD_SYSTEM)/Makefile
-	OUT_DIR=$(OUT_DIR) $(XMLNOTICE) -o $$@ -product=$$(PRIVATE_PRODUCT) -title=$$(PRIVATE_MESSAGE) $(foreach prefix, $(5), -strip_prefix=$(prefix)) $(call corresponding-license-metadata,$(4))
+	OUT_DIR=$(OUT_DIR) $(XMLNOTICE) -o $$@ -product=$$(PRIVATE_PRODUCT) -title=$$(PRIVATE_MESSAGE) $(foreach prefix, $(5), -strip_prefix=$(prefix)) $$(PRIVATE_DEPS)
 
 notice_files: $(1)
 endef
@@ -1514,48 +1523,7 @@
 notice_files: $(1)
 endef
 
-# Create the rule to combine the files into text and html/xml forms
-# $(1) - xml_excluded_system_product_odm_vendor_dlkm_odm_dlkm|
-#        xml_excluded_vendor_product_odm_vendor_dlkm_odm_dlkm|
-#        xml_product|xml_odm|xml_system_ext|xml_system|xml_vendor_dlkm|
-#        xml_odm_dlkm|html
-# $(2) - Plain text output file
-# $(3) - HTML/XML output file
-# $(4) - File title
-# $(5) - Directory to use.  Notice files are all $(5)/src.  Other
-#		 directories in there will be used for scratch
-# $(6) - Dependencies for the output files
-# $(7) - Directories to exclude
-#
-# The algorithm here is that we go collect a hash for each of the notice
-# files and write the names of the files that match that hash.  Then
-# to generate the real files, we go print out all of the files and their
-# hashes.
-#
-# These rules are fairly complex, so they depend on this makefile so if
-# it changes, they'll run again.
-#
-# TODO: We could clean this up so that we just record the locations of the
-# original notice files instead of making rules to copy them somwehere.
-# Then we could traverse that without quite as much bash drama.
-define combine-notice-files
-$(2): PRIVATE_MESSAGE := $(4)
-$(2): PRIVATE_DIR := $(5)
-$(2): .KATI_IMPLICIT_OUTPUTS := $(3)
-$(2): $(6) $(BUILD_SYSTEM)/Makefile build/make/tools/generate-notice-files.py
-	build/make/tools/generate-notice-files.py --text-output $(2) $(foreach xdir, $(7), -e $(xdir) )\
-	    $(if $(filter $(1),xml_excluded_vendor_product_odm_vendor_dlkm_odm_dlkm),-e vendor -e product -e system_ext -e odm -e vendor_dlkm -e odm_dlkm --xml-output, \
-	      $(if $(filter $(1),xml_excluded_system_product_odm_vendor_dlkm_odm_dlkm),-e system -e product -e system_ext -e odm -e vendor_dlkm -e odm_dlkm --xml-output, \
-	        $(if $(filter $(1),xml_product),-i product --xml-output, \
-	          $(if $(filter $(1),xml_system_ext),-i system_ext --xml-output, \
-	            $(if $(filter $(1),xml_system),-i system --xml-output, \
-	              $(if $(filter $(1),xml_odm),-i odm --xml-output, \
-	                $(if $(filter $(1),xml_vendor_dlkm),-i vendor_dlkm --xml-output, \
-	                  $(if $(filter $(1),xml_odm_dlkm),-i odm_dlkm --xml-output, \
-	                    --html-output)))))))) $(3) \
-	    -t $$(PRIVATE_MESSAGE) $$(foreach dir,$$(sort $$(PRIVATE_DIR)), -s $$(dir)/src)
-notice_files: $(2) $(3)
-endef
+$(KATI_obsolete_var combine-notice-files, To create notice files use xml-notice-rule, html-notice-rule, or text-notice-rule.)
 
 # Notice file logic isn't relevant for TARGET_BUILD_APPS
 ifndef TARGET_BUILD_APPS
@@ -1563,9 +1531,7 @@
 # TODO These intermediate NOTICE.txt/NOTICE.html files should go into
 # TARGET_OUT_NOTICE_FILES now that the notice files are gathered from
 # the src subdirectory.
-target_notice_file_txt := $(TARGET_OUT_INTERMEDIATES)/NOTICE.txt
 kernel_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/kernel.txt
-winpthreads_notice_file := $(TARGET_OUT_NOTICE_FILES)/src/winpthreads.txt
 
 # Some targets get included under $(PRODUCT_OUT) for debug symbols or other
 # reasons--not to be flashed onto any device. Targets under these directories
@@ -1688,36 +1654,80 @@
 
 ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
 
+need_vendor_notice := false
+ifeq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
+   need_vendor_notice := true
+endif
+
+ifdef BUILDING_DEBUG_VENDOR_BOOT_IMAGE
+   need_vendor_notice := true
+endif
+
+ifdef BUILDING_VENDOR_IMAGE
+   need_vendor_notice := true
+endif
+
+ifeq (true,$(need_vendor_notice))
+ifneq (,$(installed_vendor_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_notice_xml_gz)
+endif
+endif
+
+need_vendor_notice :=
+
+ifdef BUILDING_ODM_IMAGE
+ifneq (,$(installed_odm_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_PRODUCT_IMAGE
+ifneq (,$(installed_product_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_product_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_SYSTEM_EXT_IMAGE
+ifneq (,$(installed_system_ext_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_ext_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_VENDOR_DLKM_IMAGE
+ifneq (,$(installed_vendor_dlkm_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_dlkm_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_ODM_DLKM_IMAGE
+ifneq (,$(installed_odm_dlkm_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_dlkm_notice_xml_gz)
+endif
+endif
+
+ifdef BUILDING_SYSTEM_DLKM_IMAGE
+ifneq (,$(installed_system_dlkm_notice_xml_gz))
+ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_dlkm_notice_xml_gz)
+endif
+endif
+
 endif  # TARGET_BUILD_APPS
 
-# The kernel isn't really a module, so to get its module file in there, we
-# make the target NOTICE files depend on this particular file too, which will
-# then be in the right directory for the find in combine-notice-files to work.
+# Presently, none of the prebuilts etc. comply with the policy of having a license text. Fake one here.
 $(eval $(call copy-one-file,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,$(kernel_notice_file)))
 
-# No matter where it gets copied from, a copied linux kernel is licensed under "GPL 2.0 only"
-$(eval $(call declare-copy-files-license-metadata,,:kernel,SPDX-license-identifier-GPL-2.0-only,notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,))
+ifneq (,$(strip $(INSTALLED_KERNEL_TARGET)))
+$(call declare-license-metadata,$(INSTALLED_KERNEL_TARGET),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
+endif
 
-$(eval $(call copy-one-file,$(BUILD_SYSTEM)/WINPTHREADS_COPYING,$(winpthreads_notice_file)))
+# No matter where it gets copied from, a copied linux kernel is licensed under "GPL 2.0 only"
+$(eval $(call declare-copy-files-license-metadata,,:kernel,SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,kernel))
 
 
 # #################################################################
 # Targets for user images
 # #################################################################
 
-INTERNAL_USERIMAGES_EXT_VARIANT :=
-ifeq ($(TARGET_USERIMAGES_USE_EXT2),true)
-INTERNAL_USERIMAGES_EXT_VARIANT := ext2
-else
-ifeq ($(TARGET_USERIMAGES_USE_EXT3),true)
-INTERNAL_USERIMAGES_EXT_VARIANT := ext3
-else
-ifeq ($(TARGET_USERIMAGES_USE_EXT4),true)
-INTERNAL_USERIMAGES_EXT_VARIANT := ext4
-endif
-endif
-endif
-
 # These options tell the recovery updater/installer how to mount the partitions writable.
 # <fstype>=<fstype_opts>[|<fstype_opts>]...
 # fstype_opts := <opt>[,<opt>]...
@@ -1725,19 +1735,6 @@
 # The following worked on Nexus devices with Kernel 3.1, 3.4, 3.10
 DEFAULT_TARGET_RECOVERY_FSTYPE_MOUNT_OPTIONS := ext4=max_batch_time=0,commit=1,data=ordered,barrier=1,errors=panic,nodelalloc
 
-ifneq (true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED))
-  INTERNAL_USERIMAGES_SPARSE_EXT_FLAG := -s
-endif
-ifneq (true,$(TARGET_USERIMAGES_SPARSE_EROFS_DISABLED))
-  INTERNAL_USERIMAGES_SPARSE_EROFS_FLAG := -s
-endif
-ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))
-  INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG := -s
-endif
-ifneq (true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED))
-  INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG := -S
-endif
-
 INTERNAL_USERIMAGES_DEPS := \
     $(BUILD_IMAGE) \
     $(MKE2FS_CONF) \
@@ -1760,8 +1757,12 @@
     $(BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE) \
   ,erofs),)
 INTERNAL_USERIMAGES_DEPS += $(MKEROFS)
+ifeq ($(BOARD_EROFS_USE_LEGACY_COMPRESSION),true)
+BOARD_EROFS_COMPRESSOR ?= "lz4"
+else
 BOARD_EROFS_COMPRESSOR ?= "lz4hc,9"
 endif
+endif
 
 ifneq ($(filter \
     $(BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE) \
@@ -1776,13 +1777,6 @@
 INTERNAL_USERIMAGES_DEPS += $(MKSQUASHFSUSERIMG)
 endif
 
-ifeq (true,$(PRODUCT_SUPPORTS_VERITY))
-INTERNAL_USERIMAGES_DEPS += $(BUILD_VERITY_METADATA) $(BUILD_VERITY_TREE) $(APPEND2SIMG) $(VERITY_SIGNER)
-ifeq (true,$(PRODUCT_SUPPORTS_VERITY_FEC))
-INTERNAL_USERIMAGES_DEPS += $(FEC)
-endif
-endif
-
 ifeq ($(BOARD_AVB_ENABLE),true)
 INTERNAL_USERIMAGES_DEPS += $(AVBTOOL)
 endif
@@ -1799,14 +1793,6 @@
 
 INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
 
-ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
-
-ifeq ($(PRODUCT_SUPPORTS_VERITY),true)
-  $(error vboot 1.0 doesn't support logical partition)
-endif
-
-endif # PRODUCT_USE_DYNAMIC_PARTITIONS
-
 # $(1) the partition name (eg system)
 # $(2) the image prop file
 define add-common-flags-to-image-props
@@ -1820,6 +1806,7 @@
 define add-common-ro-flags-to-image-props
 $(eval _var := $(call to-upper,$(1)))
 $(if $(BOARD_$(_var)IMAGE_EROFS_COMPRESSOR),$(hide) echo "$(1)_erofs_compressor=$(BOARD_$(_var)IMAGE_EROFS_COMPRESSOR)" >> $(2))
+$(if $(BOARD_$(_var)IMAGE_EROFS_COMPRESS_HINTS),$(hide) echo "$(1)_erofs_compress_hints=$(BOARD_$(_var)IMAGE_EROFS_COMPRESS_HINTS)" >> $(2))
 $(if $(BOARD_$(_var)IMAGE_EROFS_PCLUSTER_SIZE),$(hide) echo "$(1)_erofs_pcluster_size=$(BOARD_$(_var)IMAGE_EROFS_PCLUSTER_SIZE)" >> $(2))
 $(if $(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT),$(hide) echo "$(1)_extfs_inode_count=$(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT)" >> $(2))
 $(if $(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT),$(hide) echo "$(1)_extfs_rsv_pct=$(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT)" >> $(2))
@@ -1899,22 +1886,22 @@
 )
 $(hide) echo "ext_mkuserimg=$(notdir $(MKEXTUSERIMG))" >> $(1)
 
-$(if $(INTERNAL_USERIMAGES_EXT_VARIANT),$(hide) echo "fs_type=$(INTERNAL_USERIMAGES_EXT_VARIANT)" >> $(1))
-$(if $(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG),$(hide) echo "extfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG)" >> $(1))
-$(if $(INTERNAL_USERIMAGES_SPARSE_EROFS_FLAG),$(hide) echo "erofs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EROFS_FLAG)" >> $(1))
-$(if $(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG),$(hide) echo "squashfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG)" >> $(1))
-$(if $(INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG),$(hide) echo "f2fs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_F2FS_FLAG)" >> $(1))
+$(if $(filter true,$(TARGET_USERIMAGES_USE_EXT2)),$(hide) echo "fs_type=ext2" >> $(1),
+  $(if $(filter true,$(TARGET_USERIMAGES_USE_EXT3)),$(hide) echo "fs_type=ext3" >> $(1),
+    $(if $(filter true,$(TARGET_USERIMAGES_USE_EXT4)),$(hide) echo "fs_type=ext4" >> $(1))))
+
+$(if $(filter true,$(TARGET_USERIMAGES_SPARSE_EXT_DISABLED)),,$(hide) echo "extfs_sparse_flag=-s" >> $(1))
+$(if $(filter true,$(TARGET_USERIMAGES_SPARSE_EROFS_DISABLED)),,$(hide) echo "erofs_sparse_flag=-s" >> $(1))
+$(if $(filter true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED)),,$(hide) echo "squashfs_sparse_flag=-s" >> $(1))
+$(if $(filter true,$(TARGET_USERIMAGES_SPARSE_F2FS_DISABLED)),,$(hide) echo "f2fs_sparse_flag=-S" >> $(1))
 $(if $(BOARD_EROFS_COMPRESSOR),$(hide) echo "erofs_default_compressor=$(BOARD_EROFS_COMPRESSOR)" >> $(1))
+$(if $(BOARD_EROFS_COMPRESS_HINTS),$(hide) echo "erofs_default_compress_hints=$(BOARD_EROFS_COMPRESS_HINTS)" >> $(1))
 $(if $(BOARD_EROFS_PCLUSTER_SIZE),$(hide) echo "erofs_pcluster_size=$(BOARD_EROFS_PCLUSTER_SIZE)" >> $(1))
 $(if $(BOARD_EROFS_SHARE_DUP_BLOCKS),$(hide) echo "erofs_share_dup_blocks=$(BOARD_EROFS_SHARE_DUP_BLOCKS)" >> $(1))
+$(if $(BOARD_EROFS_USE_LEGACY_COMPRESSION),$(hide) echo "erofs_use_legacy_compression=$(BOARD_EROFS_USE_LEGACY_COMPRESSION)" >> $(1))
 $(if $(BOARD_EXT4_SHARE_DUP_BLOCKS),$(hide) echo "ext4_share_dup_blocks=$(BOARD_EXT4_SHARE_DUP_BLOCKS)" >> $(1))
 $(if $(BOARD_FLASH_LOGICAL_BLOCK_SIZE), $(hide) echo "flash_logical_block_size=$(BOARD_FLASH_LOGICAL_BLOCK_SIZE)" >> $(1))
 $(if $(BOARD_FLASH_ERASE_BLOCK_SIZE), $(hide) echo "flash_erase_block_size=$(BOARD_FLASH_ERASE_BLOCK_SIZE)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_BOOT_SIGNER),$(hide) echo "boot_signer=$(PRODUCT_SUPPORTS_BOOT_SIGNER)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity=$(PRODUCT_SUPPORTS_VERITY)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_key=$(PRODUCT_VERITY_SIGNING_KEY)" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_signer_cmd=$(notdir $(VERITY_SIGNER))" >> $(1))
-$(if $(PRODUCT_SUPPORTS_VERITY_FEC),$(hide) echo "verity_fec=$(PRODUCT_SUPPORTS_VERITY_FEC)" >> $(1))
 $(if $(filter eng, $(TARGET_BUILD_VARIANT)),$(hide) echo "verity_disable=true" >> $(1))
 $(if $(PRODUCT_SYSTEM_VERITY_PARTITION),$(hide) echo "system_verity_block_device=$(PRODUCT_SYSTEM_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCT_VENDOR_VERITY_PARTITION),$(hide) echo "vendor_verity_block_device=$(PRODUCT_VENDOR_VERITY_PARTITION)" >> $(1))
@@ -1997,8 +1984,8 @@
         $(hide) echo "avb_system_dlkm_rollback_index_location=$(BOARD_SYSTEM_SYSTEM_DLKM_ROLLBACK_INDEX_LOCATION)" >> $(1)))
 $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
     $(hide) echo "recovery_as_boot=true" >> $(1))
-$(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),\
-    $(hide) echo "system_root_image=true" >> $(1))
+$(if $(filter true,$(BOARD_BUILD_GKI_BOOT_IMAGE_WITHOUT_RAMDISK)),\
+    $(hide) echo "gki_boot_image_without_ramdisk=true" >> $(1))
 $(hide) echo "root_dir=$(TARGET_ROOT_OUT)" >> $(1)
 $(if $(filter true,$(PRODUCT_USE_DYNAMIC_PARTITION_SIZE)),\
     $(hide) echo "use_dynamic_partition_size=true" >> $(1))
@@ -2064,8 +2051,6 @@
 INSTALLED_FILES_FILE_RECOVERY := $(PRODUCT_OUT)/installed-files-recovery.txt
 INSTALLED_FILES_JSON_RECOVERY := $(INSTALLED_FILES_FILE_RECOVERY:.txt=.json)
 
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_RECOVERY)))
-
 ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
 INSTALLED_BOOTIMAGE_TARGET := $(BUILT_BOOTIMAGE_TARGET)
 endif
@@ -2084,6 +2069,9 @@
 	$(FILESLIST) $(TARGET_RECOVERY_ROOT_OUT) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_RECOVERY)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON_RECOVERY)))
+
 recovery_sepolicy := \
     $(TARGET_RECOVERY_ROOT_OUT)/sepolicy \
     $(TARGET_RECOVERY_ROOT_OUT)/plat_file_contexts \
@@ -2272,20 +2260,18 @@
 #      (BOARD_USES_FULL_RECOVERY_IMAGE = true);
 #   b) We build a single image that contains boot and recovery both - no recovery image to install
 #      (BOARD_USES_RECOVERY_AS_BOOT = true);
-#   c) We mount the system image as / and therefore do not have a ramdisk in boot.img
-#      (BOARD_BUILD_SYSTEM_ROOT_IMAGE = true).
-#   d) We include the recovery DTBO image within recovery - not needing the resource file as we
+#   c) We include the recovery DTBO image within recovery - not needing the resource file as we
 #      do bsdiff because boot and recovery will contain different number of entries
 #      (BOARD_INCLUDE_RECOVERY_DTBO = true).
-#   e) We include the recovery ACPIO image within recovery - not needing the resource file as we
+#   d) We include the recovery ACPIO image within recovery - not needing the resource file as we
 #      do bsdiff because boot and recovery will contain different number of entries
 #      (BOARD_INCLUDE_RECOVERY_ACPIO = true).
-#   f) We build a single image that contains vendor_boot and recovery both - no recovery image to
+#   e) We build a single image that contains vendor_boot and recovery both - no recovery image to
 #      install
 #      (BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT = true).
 
 ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_USES_RECOVERY_AS_BOOT) \
-  $(BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO) \
+  $(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO) \
   $(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
 # Named '.dat' so we don't attempt to use imgdiff for patching it.
 RECOVERY_RESOURCE_ZIP := $(TARGET_OUT_VENDOR)/etc/recovery-resource.dat
@@ -2339,6 +2325,11 @@
 	$(hide) cat $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET) >> $@
 	$(call append-recovery-ui-properties,$(PRIVATE_RECOVERY_UI_PROPERTIES),$@)
 
+$(call declare-1p-target,$(INSTALLED_RECOVERY_BUILD_PROP_TARGET),build)
+$(call declare-license-deps,$(INSTALLED_RECOVERY_BUILD_PROP_TARGET),\
+    $(INSTALLED_BUILD_PROP_TARGET) $(INSTALLED_VENDOR_BUILD_PROP_TARGET) $(INSTALLED_ODM_BUILD_PROP_TARGET) \
+    $(INSTALLED_PRODUCT_BUILD_PROP_TARGET) $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET))
+
 # Only install boot/etc/build.prop to recovery image on recovery_as_boot.
 # On device with dedicated recovery partition, the file should come from the boot
 # ramdisk.
@@ -2346,6 +2337,9 @@
 INSTALLED_RECOVERY_RAMDISK_BUILD_PROP_TARGET := $(TARGET_RECOVERY_ROOT_OUT)/$(RAMDISK_BUILD_PROP_REL_PATH)
 $(INSTALLED_RECOVERY_RAMDISK_BUILD_PROP_TARGET): $(INSTALLED_RAMDISK_BUILD_PROP_TARGET)
 	$(copy-file-to-target)
+
+$(call declare-1p-target,$(INSTALLED_RECOVERY_RAMDISK_BUILD_PROP_TARGET),build)
+$(call declare-license-deps,$(INSTALLED_RECOVERY_RAMDISK_BUILD_PROP_TARGET),$(INSTALLED_RAMDISK_BUILD_PROP_TARGET))
 endif
 
 INTERNAL_RECOVERYIMAGE_ARGS := --ramdisk $(recovery_ramdisk)
@@ -2399,8 +2393,7 @@
 	# Use rsync because "cp -Rf" fails to overwrite broken symlinks on Mac.
 	rsync -a --exclude=sdcard $(IGNORE_RECOVERY_SEPOLICY) $(IGNORE_CACHE_LINK) $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT)
 	# Modifying ramdisk contents...
-	$(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),, \
-	  ln -sf /system/bin/init $(TARGET_RECOVERY_ROOT_OUT)/init)
+	ln -sf /system/bin/init $(TARGET_RECOVERY_ROOT_OUT)/init
 	# Removes $(TARGET_RECOVERY_ROOT_OUT)/init*.rc EXCEPT init.recovery*.rc.
 	find $(TARGET_RECOVERY_ROOT_OUT) -maxdepth 1 -name 'init*.rc' -type f -not -name "init.recovery.*.rc" | xargs rm -f
 	cp $(TARGET_ROOT_OUT)/init.recovery.*.rc $(TARGET_RECOVERY_ROOT_OUT)/ 2> /dev/null || true # Ignore error when the src file doesn't exist.
@@ -2433,12 +2426,6 @@
     $(MKBOOTIMG) $(if $(strip $(2)),--kernel $(strip $(2))) $(INTERNAL_RECOVERYIMAGE_ARGS) \
                  $(INTERNAL_MKBOOTIMG_VERSION_ARGS) \
                  $(BOARD_RECOVERY_MKBOOTIMG_ARGS) --output $(1))
-  $(if $(filter true,$(PRODUCT_SUPPORTS_BOOT_SIGNER)),\
-    $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
-      $(BOOT_SIGNER) /boot $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1),\
-      $(BOOT_SIGNER) /recovery $(1) $(PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1)\
-    )\
-  )
   $(if $(filter true,$(PRODUCT_SUPPORTS_VBOOT)), \
     $(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1))
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
@@ -2451,9 +2438,6 @@
 endef
 
 recoveryimage-deps := $(MKBOOTIMG) $(recovery_ramdisk) $(recovery_kernel)
-ifeq (true,$(PRODUCT_SUPPORTS_BOOT_SIGNER))
-  recoveryimage-deps += $(BOOT_SIGNER)
-endif
 ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
   recoveryimage-deps += $(VBOOT_SIGNER)
 endif
@@ -2480,10 +2464,10 @@
 	$(call pretty,"Target boot image from recovery: $@")
 	$(call build-recoveryimage-target, $@, $(PRODUCT_OUT)/$(subst .img,,$(subst boot,kernel,$(notdir $@))))
 
-$(call declare-1p-container,$(INSTALLED_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
 $(call declare-container-license-deps,$(INSTALLED_BOOTIMAGE_TARGET),$(recoveryimage-deps),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BOOTIMAGE_TARGET)
 endif # BOARD_USES_RECOVERY_AS_BOOT
 
 $(INSTALLED_RECOVERYIMAGE_TARGET): $(recoveryimage-deps)
@@ -2501,7 +2485,7 @@
 $(call declare-1p-container,$(INSTALLED_RECOVERYIMAGE_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_RECOVERYIMAGE_TARGET),$(recoveryimage-deps),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_RECOVERYIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_RECOVERYIMAGE_TARGET)
 
 .PHONY: recoveryimage-nodeps
 recoveryimage-nodeps:
@@ -2566,6 +2550,7 @@
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 $(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_DEBUG_RAMDISK)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON_DEBUG_RAMDISK)))
 
 ifdef BUILDING_DEBUG_BOOT_IMAGE
 
@@ -2586,7 +2571,7 @@
 $(call declare-1p-container,$(INSTALLED_DEBUG_RAMDISK_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_DEBUG_RAMDISK_TARGET),$(INSTALLED_RAMDISK_TARGET),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_DEBUG_RAMDISK_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_DEBUG_RAMDISK_TARGET)
 
 .PHONY: ramdisk_debug-nodeps
 ramdisk_debug-nodeps: $(MKBOOTFS) | $(COMPRESSION_COMMAND_DEPS)
@@ -2650,10 +2635,10 @@
 	$(call pretty,"Target boot debug image: $@")
 	$(call build-debug-bootimage-target, $@)
 
-$(call declare-1p-container,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),)
+$(call declare-container-license-metadata,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),SPDX-license-identifier-GPL-2.0-only SPDX-license-identifier-Apache-2.0,restricted notice,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING build/soong/licenses/LICENSE,"Boot Image",boot)
 $(call declare-container-license-deps,$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(INSTALLED_BOOTIMAGE_TARGET),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_DEBUG_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_DEBUG_BOOTIMAGE_TARGET)
 
 .PHONY: bootimage_debug-nodeps
 bootimage_debug-nodeps: $(MKBOOTIMG) $(AVBTOOL)
@@ -2699,7 +2684,8 @@
 	$(FILESLIST) $(INTERNAL_DEBUG_VENDOR_RAMDISK_SRC_DIRS) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK))
 
 INTERNAL_VENDOR_DEBUG_RAMDISK_TARGET := $(call intermediates-dir-for,PACKAGING,vendor_boot-debug)/vendor_ramdisk-debug.cpio$(RAMDISK_EXT)
 
@@ -2805,7 +2791,7 @@
 $(call declare-1p-container,$(INSTALLED_TEST_HARNESS_RAMDISK_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_TEST_HARNESS_RAMDISK_TARGET),$(INTERNAL_TEST_HARNESS_RAMDISK_SRC_DEPS),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET)
 
 .PHONY: ramdisk_test_harness-nodeps
 ramdisk_test_harness-nodeps: $(MKBOOTFS) | $(COMPRESSION_COMMAND_DEPS)
@@ -2854,7 +2840,7 @@
 $(call declare-1p-container,$(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET),$(INSTALLED_DEBUG_BOOTIMAGE_TARGET),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET)
 
 .PHONY: bootimage_test_harness-nodeps
 bootimage_test_harness-nodeps: $(MKBOOTIMG) $(AVBTOOL)
@@ -2940,21 +2926,26 @@
 endef
 
 
-# -----------------------------------------------------------------
-# system image
-
 # FSVerity metadata generation
 # Generate fsverity metadata files (.fsv_meta) and build manifest
-# (system/etc/security/fsverity/BuildManifest.apk) BEFORE filtering systemimage files below
-ifeq ($(PRODUCT_SYSTEM_FSVERITY_GENERATE_METADATA),true)
+# (<partition>/etc/security/fsverity/BuildManifest<suffix>.apk) BEFORE filtering systemimage,
+# vendorimage, odmimage, productimage files below.
+ifeq ($(PRODUCT_FSVERITY_GENERATE_METADATA),true)
 
-# Generate fsv_meta
-fsverity-metadata-targets := $(sort $(filter \
+fsverity-metadata-targets-patterns := \
   $(TARGET_OUT)/framework/% \
   $(TARGET_OUT)/etc/boot-image.prof \
   $(TARGET_OUT)/etc/dirty-image-objects \
   $(TARGET_OUT)/etc/preloaded-classes \
-  $(TARGET_OUT)/etc/classpaths/%.pb, \
+  $(TARGET_OUT)/etc/classpaths/%.pb \
+
+ifdef BUILDING_SYSTEM_EXT_IMAGE
+fsverity-metadata-targets-patterns += $(TARGET_OUT_SYSTEM_EXT)/framework/%
+endif
+
+# Generate fsv_meta
+fsverity-metadata-targets := $(sort $(filter \
+  $(fsverity-metadata-targets-patterns), \
   $(ALL_DEFAULT_INSTALLED_MODULES)))
 
 define fsverity-generate-metadata
@@ -2968,38 +2959,66 @@
 $(foreach f,$(fsverity-metadata-targets),$(eval $(call fsverity-generate-metadata,$(f))))
 ALL_DEFAULT_INSTALLED_MODULES += $(addsuffix .fsv_meta,$(fsverity-metadata-targets))
 
-# Generate BuildManifest.apk
 FSVERITY_APK_KEY_PATH := $(DEFAULT_SYSTEM_DEV_CERTIFICATE)
-FSVERITY_APK_OUT := $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk
-FSVERITY_APK_MANIFEST_PATH := system/security/fsverity/AndroidManifest.xml
-$(FSVERITY_APK_OUT): PRIVATE_FSVERITY := $(HOST_OUT_EXECUTABLES)/fsverity
-$(FSVERITY_APK_OUT): PRIVATE_AAPT2 := $(HOST_OUT_EXECUTABLES)/aapt2
-$(FSVERITY_APK_OUT): PRIVATE_MIN_SDK_VERSION := $(DEFAULT_APP_TARGET_SDK)
-$(FSVERITY_APK_OUT): PRIVATE_VERSION_CODE := $(PLATFORM_SDK_VERSION)
-$(FSVERITY_APK_OUT): PRIVATE_VERSION_NAME := $(APPS_DEFAULT_VERSION_NAME)
-$(FSVERITY_APK_OUT): PRIVATE_APKSIGNER := $(HOST_OUT_EXECUTABLES)/apksigner
-$(FSVERITY_APK_OUT): PRIVATE_MANIFEST := $(FSVERITY_APK_MANIFEST_PATH)
-$(FSVERITY_APK_OUT): PRIVATE_FRAMEWORK_RES := $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
-$(FSVERITY_APK_OUT): PRIVATE_KEY := $(FSVERITY_APK_KEY_PATH)
-$(FSVERITY_APK_OUT): PRIVATE_INPUTS := $(fsverity-metadata-targets)
-$(FSVERITY_APK_OUT): $(HOST_OUT_EXECUTABLES)/fsverity_manifest_generator \
+FSVERITY_APK_MANIFEST_TEMPLATE_PATH := system/security/fsverity/AndroidManifest.xml
+
+# Generate and install BuildManifest<suffix>.apk for the given partition
+# $(1): path of the output APK
+# $(2): partition name
+define fsverity-generate-and-install-manifest-apk
+fsverity-metadata-targets-$(2) := $(filter $(PRODUCT_OUT)/$(2)/%,\
+      $(fsverity-metadata-targets))
+$(1): PRIVATE_FSVERITY := $(HOST_OUT_EXECUTABLES)/fsverity
+$(1): PRIVATE_AAPT2 := $(HOST_OUT_EXECUTABLES)/aapt2
+$(1): PRIVATE_MIN_SDK_VERSION := $(DEFAULT_APP_TARGET_SDK)
+$(1): PRIVATE_VERSION_CODE := $(PLATFORM_SDK_VERSION)
+$(1): PRIVATE_VERSION_NAME := $(APPS_DEFAULT_VERSION_NAME)
+$(1): PRIVATE_APKSIGNER := $(HOST_OUT_EXECUTABLES)/apksigner
+$(1): PRIVATE_MANIFEST := $(FSVERITY_APK_MANIFEST_TEMPLATE_PATH)
+$(1): PRIVATE_FRAMEWORK_RES := $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk
+$(1): PRIVATE_KEY := $(FSVERITY_APK_KEY_PATH)
+$(1): PRIVATE_INPUTS := $$(fsverity-metadata-targets-$(2))
+$(1): PRIVATE_ASSETS := $(call intermediates-dir-for,ETC,build_manifest-$(2))/assets
+$(1): $(HOST_OUT_EXECUTABLES)/fsverity_manifest_generator \
     $(HOST_OUT_EXECUTABLES)/fsverity $(HOST_OUT_EXECUTABLES)/aapt2 \
-    $(HOST_OUT_EXECUTABLES)/apksigner $(FSVERITY_APK_MANIFEST_PATH) \
+    $(HOST_OUT_EXECUTABLES)/apksigner $(FSVERITY_APK_MANIFEST_TEMPLATE_PATH) \
     $(FSVERITY_APK_KEY_PATH).x509.pem $(FSVERITY_APK_KEY_PATH).pk8 \
     $(call intermediates-dir-for,APPS,framework-res,,COMMON)/package-export.apk \
-    $(fsverity-metadata-targets)
-	$< --fsverity-path $(PRIVATE_FSVERITY) --aapt2-path $(PRIVATE_AAPT2) \
-	    --min-sdk-version $(PRIVATE_MIN_SDK_VERSION) \
-	    --version-code $(PRIVATE_VERSION_CODE) \
-	    --version-name $(PRIVATE_VERSION_NAME) \
-	    --apksigner-path $(PRIVATE_APKSIGNER) --apk-key-path $(PRIVATE_KEY) \
-	    --apk-manifest-path $(PRIVATE_MANIFEST) --framework-res $(PRIVATE_FRAMEWORK_RES) \
-	    --output $@ \
-	    --base-dir $(PRODUCT_OUT) $(PRIVATE_INPUTS)
+    $$(fsverity-metadata-targets-$(2))
+	rm -rf $$(PRIVATE_ASSETS)
+	mkdir -p $$(PRIVATE_ASSETS)
+	$$< --fsverity-path $$(PRIVATE_FSVERITY) \
+	    --base-dir $$(PRODUCT_OUT) \
+	    --output $$(PRIVATE_ASSETS)/build_manifest.pb \
+	    $$(PRIVATE_INPUTS)
+	$$(PRIVATE_AAPT2) link -o $$@ \
+	    -A $$(PRIVATE_ASSETS) \
+	    -I $$(PRIVATE_FRAMEWORK_RES) \
+	    --min-sdk-version $$(PRIVATE_MIN_SDK_VERSION) \
+	    --version-code $$(PRIVATE_VERSION_CODE) \
+	    --version-name $$(PRIVATE_VERSION_NAME) \
+	    --manifest $$(PRIVATE_MANIFEST) \
+            --rename-manifest-package com.android.security.fsverity_metadata.$(2)
+	$$(PRIVATE_APKSIGNER) sign --in $$@ \
+	    --cert $$(PRIVATE_KEY).x509.pem \
+	    --key $$(PRIVATE_KEY).pk8
 
-ALL_DEFAULT_INSTALLED_MODULES += $(FSVERITY_APK_OUT)
+ALL_DEFAULT_INSTALLED_MODULES += $(1)
 
-endif  # PRODUCT_SYSTEM_FSVERITY_GENERATE_METADATA
+endef  # fsverity-generate-and-install-manifest-apk
+
+$(eval $(call fsverity-generate-and-install-manifest-apk, \
+  $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk,system))
+ifdef BUILDING_SYSTEM_EXT_IMAGE
+  $(eval $(call fsverity-generate-and-install-manifest-apk, \
+    $(TARGET_OUT_SYSTEM_EXT)/etc/security/fsverity/BuildManifestSystemExt.apk,system_ext))
+endif
+
+endif  # PRODUCT_FSVERITY_GENERATE_METADATA
+
+
+# -----------------------------------------------------------------
+# system image
 
 INSTALLED_FILES_OUTSIDE_IMAGES := $(filter-out $(TARGET_OUT)/%, $(INSTALLED_FILES_OUTSIDE_IMAGES))
 INTERNAL_SYSTEMIMAGE_FILES := $(sort $(filter $(TARGET_OUT)/%, \
@@ -3078,14 +3097,11 @@
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 $(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON)))
 
 .PHONY: installed-file-list
 installed-file-list: $(INSTALLED_FILES_FILE)
 
-ifeq ($(HOST_OS),linux)
-$(call dist-for-goals, sdk sdk_addon, $(INSTALLED_FILES_FILE))
-endif
-
 systemimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,systemimage)
 BUILT_SYSTEMIMAGE := $(systemimage_intermediates)/system.img
@@ -3128,7 +3144,7 @@
 ifneq ($(INSTALLED_BOOTIMAGE_TARGET),)
 ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 ifneq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true)
-ifneq (,$(filter true, $(BOARD_BUILD_SYSTEM_ROOT_IMAGE) $(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO)))
+ifneq (,$(filter true,$(BOARD_INCLUDE_RECOVERY_DTBO) $(BOARD_INCLUDE_RECOVERY_ACPIO)))
 diff_tool := $(HOST_OUT_EXECUTABLES)/bsdiff
 else
 diff_tool := $(HOST_OUT_EXECUTABLES)/imgdiff
@@ -3220,7 +3236,7 @@
 $(call declare-1p-container,$(INSTALLED_USERDATAIMAGE_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_USERDATAIMAGE_TARGET),$(INSTALLED_USERDATAIMAGE_TARGET_DEPS),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_USERDATAIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_USERDATAIMAGE_TARGET)
 
 .PHONY: userdataimage-nodeps
 userdataimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
@@ -3272,7 +3288,7 @@
 $(call declare-1p-container,$(INSTALLED_BPTIMAGE_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_BPTIMAGE_TARGET),$(BOARD_BPT_INPUT_FILES),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_BPTIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_BPTIMAGE_TARGET)
 
 .PHONY: bptimage-nodeps
 bptimage-nodeps:
@@ -3311,7 +3327,7 @@
 $(call declare-1p-container,$(INSTALLED_CACHEIMAGE_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_CACHEIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_CACHEIMAGE_FILES),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_CACHEIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_CACHEIMAGE_TARGET)
 
 .PHONY: cacheimage-nodeps
 cacheimage-nodeps: | $(INTERNAL_USERIMAGES_DEPS)
@@ -3354,6 +3370,7 @@
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
 $(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_SYSTEMOTHER)))
+$(eval $(call declare-0p-target,$(INSTALLED_FILES_JSON_SYSTEMOTHER)))
 
 # Determines partition size for system_other.img.
 ifeq ($(PRODUCT_RETROFIT_DYNAMIC_PARTITIONS),true)
@@ -3445,7 +3462,8 @@
 	$(FILESLIST) $(TARGET_OUT_VENDOR) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_VENDOR))
 
 vendorimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,vendor)
@@ -3473,7 +3491,7 @@
 
 VENDOR_NOTICE_DEPS += $(INSTALLED_VENDORIMAGE_TARGET)
 
-$(call declare-1p-container,$(INSTALLED_VENDORIMAGE_TARGET),vendor)
+$(call declare-container-license-metadata,$(INSTALLED_VENDORIMAGE_TARGET),legacy_proprietary,proprietary,,"Vendor Image",vendor)
 $(call declare-container-license-deps,$(INSTALLED_VENDORIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_VENDORIMAGE_FILES) $(RECOVERY_FROM_BOOT_PATH),$(PRODUCT_OUT)/:/)
 
 .PHONY: vendorimage-nodeps vnod
@@ -3485,6 +3503,10 @@
 else ifdef BOARD_PREBUILT_VENDORIMAGE
 INSTALLED_VENDORIMAGE_TARGET := $(PRODUCT_OUT)/vendor.img
 $(eval $(call copy-one-file,$(BOARD_PREBUILT_VENDORIMAGE),$(INSTALLED_VENDORIMAGE_TARGET)))
+$(if $(strip $(ALL_TARGETS.$(INSTALLED_VENDORIMAGE_TARGET).META_LIC)),,\
+    $(if $(strip $(ALL_TARGETS.$(BOARD_PREBUILT_VENDORIMAGE).META_LIC)),\
+        $(call declare-copy-target-license-metadata,$(INSTALLED_VENDORIMAGE_TARGET),$(BOARD_PREBUILT_VENDORIMAGE)),\
+        $(call declare-license-metadata,$(INSTALLED_VENDORIMAGE_TARGET),legacy_proprietary,proprietary,,"Vendor Image",vendor)))
 endif
 
 # -----------------------------------------------------------------
@@ -3505,7 +3527,8 @@
 	$(FILESLIST) $(TARGET_OUT_PRODUCT) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_PRODUCT)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_PRODUCT))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_PRODUCT))
 
 productimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,product)
@@ -3564,7 +3587,8 @@
 	$(FILESLIST) $(TARGET_OUT_SYSTEM_EXT) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_SYSTEM_EXT)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_SYSTEM_EXT))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_SYSTEM_EXT))
 
 system_extimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,system_ext)
@@ -3643,7 +3667,8 @@
 	$(FILESLIST) $(TARGET_OUT_ODM) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_ODM)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_ODM))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_ODM))
 
 odmimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,odm)
@@ -3703,7 +3728,8 @@
 	$(FILESLIST) $(TARGET_OUT_VENDOR_DLKM) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_DLKM)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_VENDOR_DLKM))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_VENDOR_DLKM))
 
 vendor_dlkmimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,vendor_dlkm)
@@ -3763,7 +3789,8 @@
 	$(FILESLIST) $(TARGET_OUT_ODM_DLKM) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_ODM_DLKM)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_ODM_DLKM))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_ODM_DLKM))
 
 odm_dlkmimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,odm_dlkm)
@@ -3826,7 +3853,8 @@
 	$(FILESLIST) $(TARGET_OUT_SYSTEM_DLKM) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
 
-$(eval $(call declare-0p-target,$(INSTALLED_FILES_FILE_SYSTEM_DLKM)))
+$(call declare-0p-target,$(INSTALLED_FILES_FILE_SYSTEM_DLKM))
+$(call declare-0p-target,$(INSTALLED_FILES_JSON_SYSTEM_DLKM))
 
 system_dlkmimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,system_dlkm)
@@ -3852,6 +3880,11 @@
     $(INSTALLED_FILES_FILE_SYSTEM_DLKM)
 	$(build-system_dlkmimage-target)
 
+SYSTEM_DLKM_NOTICE_DEPS += $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
+
+$(call declare-1p-container,$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET),)
+$(call declare-container-license-deps,$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET),$(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_SYSTEM_DLKMIMAGE_FILES) $(INSTALLED_FILES_FILE_SYSTEM_DLKM),$(PRODUCT_OUT)/:/)
+
 .PHONY: system_dlkmimage-nodeps sdnod
 system_dlkmimage-nodeps sdnod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-system_dlkmimage-target)
@@ -3880,7 +3913,7 @@
 $(call declare-1p-container,$(INSTALLED_DTBOIMAGE_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_DTBOIMAGE_TARGET),$(BOARD_PREBUILT_DTBOIMAGE),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_DTBOIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_DTBOIMAGE_TARGET)
 else
 $(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE)
 	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
@@ -3891,33 +3924,21 @@
 # -----------------------------------------------------------------
 # Protected VM firmware image
 ifeq ($(BOARD_USES_PVMFWIMAGE),true)
+
+.PHONY: pvmfwimage
+pvmfwimage: $(INSTALLED_PVMFWIMAGE_TARGET)
+
 INSTALLED_PVMFWIMAGE_TARGET := $(PRODUCT_OUT)/pvmfw.img
 INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET := $(PRODUCT_OUT)/pvmfw_embedded.avbpubkey
-INTERNAL_PREBUILT_PVMFWIMAGE := packages/modules/Virtualization/pvmfw/pvmfw.img
-INTERNAL_PVMFW_EMBEDDED_AVBKEY := external/avb/test/data/testkey_rsa4096_pub.bin
-
-ifdef BOARD_PREBUILT_PVMFWIMAGE
-PREBUILT_PVMFWIMAGE_TARGET := $(BOARD_PREBUILT_PVMFWIMAGE)
-else
-PREBUILT_PVMFWIMAGE_TARGET := $(INTERNAL_PREBUILT_PVMFWIMAGE)
-endif
-
-ifeq ($(BOARD_AVB_ENABLE),true)
-$(INSTALLED_PVMFWIMAGE_TARGET): $(PREBUILT_PVMFWIMAGE_TARGET) $(AVBTOOL) $(BOARD_AVB_PVMFW_KEY_PATH)
-	cp $< $@
-	$(AVBTOOL) add_hash_footer \
-	    --image $@ \
-	    $(call get-partition-size-argument,$(BOARD_PVMFWIMAGE_PARTITION_SIZE)) \
-	    --partition_name pvmfw $(INTERNAL_AVB_PVMFW_SIGNING_ARGS) \
-	    $(BOARD_AVB_PVMFW_ADD_HASH_FOOTER_ARGS)
+INTERNAL_PVMFWIMAGE_FILES := $(call module-target-built-files,pvmfw_img)
+INTERNAL_PVMFW_EMBEDDED_AVBKEY := $(call module-target-built-files,pvmfw_embedded_key)
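+# The two files above are Soong module outputs (pvmfw_img and
+# pvmfw_embedded_key) resolved via module-target-built-files, replacing the
+# previously hardcoded prebuilt paths.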
 
 $(call declare-1p-container,$(INSTALLED_PVMFWIMAGE_TARGET),)
-$(call declare-container-license-deps,$(INSTALLED_PVMFWIMAGE_TARGET),$(PREBUILT_PVMFWIMAGE_TARGET),$(PRODUCT_OUT)/:/)
+$(call declare-container-license-deps,$(INSTALLED_PVMFWIMAGE_TARGET),$(INTERNAL_PVMFWIMAGE_FILES),$(PRODUCT_OUT)/:/)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_PVMFWIMAGE_TARGET)
-else
-$(eval $(call copy-one-file,$(PREBUILT_PVMFWIMAGE_TARGET),$(INSTALLED_PVMFWIMAGE_TARGET)))
-endif
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_PVMFWIMAGE_TARGET)
+
+$(eval $(call copy-one-file,$(INTERNAL_PVMFWIMAGE_FILES),$(INSTALLED_PVMFWIMAGE_TARGET)))
 
 $(INSTALLED_PVMFWIMAGE_TARGET): $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET)
 
@@ -4324,18 +4345,6 @@
     $(eval $(call check-and-set-custom-avb-chain-args,$(partition))))
 endif
 
-# Add kernel cmdline descriptor for kernel to mount system.img as root with
-# dm-verity. This works when system.img is either chained or not-chained:
-# - chained: The --setup_as_rootfs_from_kernel option will add dm-verity kernel
-#   cmdline descriptor to system.img
-# - not-chained: The --include_descriptors_from_image option for make_vbmeta_image
-#   will include the kernel cmdline descriptor from system.img into vbmeta.img
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-ifeq ($(filter system, $(BOARD_SUPER_PARTITION_PARTITION_LIST)),)
-BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += --setup_as_rootfs_from_kernel
-endif
-endif
-
 BOARD_AVB_MAKE_VBMETA_IMAGE_ARGS += --padding_size 4096
 BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS += --padding_size 4096
 BOARD_AVB_MAKE_VBMETA_VENDOR_IMAGE_ARGS += --padding_size 4096
@@ -4461,7 +4470,7 @@
 
 $(call declare-1p-container,$(INSTALLED_VBMETA_VENDORIMAGE_TARGET),)
 
-UNMOUNTED_NOTICE_DEPS += $(INSTALLED_VBMETA_VENDORIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_VBMETA_VENDORIMAGE_TARGET)
 endif
 
 define build-vbmetaimage-target
@@ -4541,6 +4550,26 @@
 intermediates := $(call intermediates-dir-for,PACKAGING,check_vintf_all)
 check_vintf_all_deps :=
 
+APEX_OUT := $(PRODUCT_OUT)/apex
+# -----------------------------------------------------------------
+# Create apex-info-list.xml
+
+APEX_DIRS := \
+  $(TARGET_OUT)/apex/% \
+  $(TARGET_OUT_SYSTEM_EXT)/apex/% \
+  $(TARGET_OUT_VENDOR)/apex/% \
+  $(TARGET_OUT_ODM)/apex/% \
+  $(TARGET_OUT_PRODUCT)/apex/% \
+
+apex_vintf_files := $(sort $(filter $(APEX_DIRS), $(INTERNAL_ALLIMAGES_FILES)))
+APEX_INFO_FILE   := $(APEX_OUT)/apex-info-list.xml
+
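+# apex-info-list.xml enumerates the staged APEX files so the check_vintf
+# targets below can map /apex and pass it via --apex-info-file.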
+$(APEX_INFO_FILE): $(HOST_OUT_EXECUTABLES)/dump_apex_info $(apex_vintf_files)
+	@echo "Creating apex-info-file in $(PRODUCT_OUT) "
+	$< --root_dir $(PRODUCT_OUT) --out_file $@
+
+apex_vintf_files :=
+
 # The build system only writes VINTF metadata to */etc/vintf paths. Legacy paths aren't needed here
 # because they are only used for prebuilt images.
 check_vintf_common_srcs_patterns := \
@@ -4551,6 +4580,7 @@
   $(TARGET_OUT_SYSTEM_EXT)/etc/vintf/% \
 
 check_vintf_common_srcs := $(sort $(filter $(check_vintf_common_srcs_patterns),$(INTERNAL_ALLIMAGES_FILES)))
+check_vintf_common_srcs += $(APEX_INFO_FILE)
 check_vintf_common_srcs_patterns :=
 
 check_vintf_has_system :=
@@ -4572,14 +4602,20 @@
 check_vintf_all_deps += $(check_vintf_system_log)
 $(check_vintf_system_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_system_deps)
 	@( $< --check-one --dirmap /system:$(TARGET_OUT) > $@ 2>&1 ) || ( cat $@ && exit 1 )
+$(call declare-0p-target,$(check_vintf_system_log))
 check_vintf_system_log :=
 
-vintffm_log := $(intermediates)/vintffm.log
+# -- Check framework manifest against frozen manifests for GSI targets. They need to be compatible.
+ifneq (true, $(BUILDING_VENDOR_IMAGE))
+    vintffm_log := $(intermediates)/vintffm.log
+endif
 check_vintf_all_deps += $(vintffm_log)
 $(vintffm_log): $(HOST_OUT_EXECUTABLES)/vintffm $(check_vintf_system_deps)
 	@( $< --check --dirmap /system:$(TARGET_OUT) \
 	  $(VINTF_FRAMEWORK_MANIFEST_FROZEN_DIR) > $@ 2>&1 ) || ( cat $@ && exit 1 )
 
+$(call declare-0p-target,$(vintffm_log))
+
 endif # check_vintf_system_deps
 check_vintf_system_deps :=
 
@@ -4601,6 +4637,7 @@
 	  ( $< --check-one --dirmap /vendor:$(TARGET_OUT_VENDOR) \
 	       --property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
 	       > $@ 2>&1 ) || ( cat $@ && exit 1 ); )
+$(call declare-0p-target,$(check_vintf_vendor_log))
 check_vintf_vendor_log :=
 endif # check_vintf_vendor_deps
 check_vintf_vendor_deps :=
@@ -4622,6 +4659,9 @@
 $(BUILT_KERNEL_VERSION_FILE):
 	echo $(BOARD_KERNEL_VERSION) > $@
 
+$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+$(call declare-0p-target,$(BUILT_KERNEL_VERSION_FILE))
+
 my_board_extracted_kernel := true
 endif # BOARD_KERNEL_VERSION
 endif # BOARD_KERNEL_CONFIG_FILE
@@ -4645,6 +4685,8 @@
 	  --output-configs $@ \
 	  --output-release $(BUILT_KERNEL_VERSION_FILE)
 
+$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+
 my_board_extracted_kernel := true
 endif # INSTALLED_KERNEL_TARGET
 endif # my_board_extracted_kernel
@@ -4664,6 +4706,8 @@
 	  --output-configs $@ \
 	  --output-release $(BUILT_KERNEL_VERSION_FILE)
 
+$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+
 my_board_extracted_kernel := true
 endif # INSTALLED_BOOTIMAGE_TARGET
 endif # my_board_extracted_kernel
@@ -4719,10 +4763,12 @@
   --dirmap /odm:$(TARGET_OUT_ODM) \
   --dirmap /product:$(TARGET_OUT_PRODUCT) \
   --dirmap /system_ext:$(TARGET_OUT_SYSTEM_EXT) \
+  --dirmap /apex:$(APEX_OUT) \
 
 ifdef PRODUCT_SHIPPING_API_LEVEL
 check_vintf_compatible_args += --property ro.product.first_api_level=$(PRODUCT_SHIPPING_API_LEVEL)
 endif # PRODUCT_SHIPPING_API_LEVEL
+check_vintf_compatible_args += --apex-info-file $(APEX_INFO_FILE)
 
 $(check_vintf_compatible_log): PRIVATE_CHECK_VINTF_ARGS := $(check_vintf_compatible_args)
 $(check_vintf_compatible_log): PRIVATE_CHECK_VINTF_DEPS := $(check_vintf_compatible_deps)
@@ -4753,6 +4799,8 @@
 	       --property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
 	       >> $@ 2>&1 ) || (cat $@ && exit 1); ))
 
+$(call declare-0p-target,$(check_vintf_compatible_log))
+
 check_vintf_compatible_log :=
 check_vintf_compatible_args :=
 check_vintf_compatible_deps :=
@@ -4816,6 +4864,8 @@
 	  $(call intermediates-dir-for,PACKAGING,check-all-partition-sizes)/misc_info.txt, \
 	  $@)
 
+$(call declare-0p-target,$(check_all_partition_sizes_log))
+
 .PHONY: check-all-partition-sizes
 check-all-partition-sizes: $(check_all_partition_sizes_log)
 
@@ -4930,9 +4980,9 @@
   mke2fs \
   mke2fs.conf \
   mkfs.erofs \
-  mkf2fsuserimg.sh \
+  mkf2fsuserimg \
   mksquashfs \
-  mksquashfsimage.sh \
+  mksquashfsimage \
   mkuserimg_mke2fs \
   ota_extractor \
   ota_from_target_files \
@@ -4963,6 +5013,7 @@
   apex_compression_tool \
   deapexer \
   debugfs_static \
+  dump_apex_info \
   merge_zips \
   resize2fs \
   soong_zip \
@@ -4990,7 +5041,13 @@
 
 INTERNAL_OTATOOLS_PACKAGE_FILES += \
   $(sort $(shell find build/make/target/product/security -type f -name "*.x509.pem" -o \
-      -name "*.pk8" -o -name verity_key))
+      -name "*.pk8"))
+
+ifneq (,$(wildcard packages/modules))
+INTERNAL_OTATOOLS_PACKAGE_FILES += \
+  $(sort $(shell find packages/modules -type f -name "*.x509.pem" -o -name "*.pk8" -o -name \
+      "key.pem"))
+endif
 
 ifneq (,$(wildcard device))
 INTERNAL_OTATOOLS_PACKAGE_FILES += \
@@ -5008,8 +5065,8 @@
 endif
 
 INTERNAL_OTATOOLS_RELEASETOOLS := \
-  $(sort $(shell find build/make/tools/releasetools -name "*.pyc" -prune -o \
-      \( -type f -o -type l \) -print))
+  $(shell find build/make/tools/releasetools -name "*.pyc" -prune -o \
+      \( -type f -o -type l \) -print | sort)
 
 BUILT_OTATOOLS_PACKAGE := $(PRODUCT_OUT)/otatools.zip
 $(BUILT_OTATOOLS_PACKAGE): PRIVATE_ZIP_ROOT := $(call intermediates-dir-for,PACKAGING,otatools)/otatools
@@ -5025,6 +5082,9 @@
 	cp $(SOONG_ZIP) $(ZIP2ZIP) $(MERGE_ZIPS) $(PRIVATE_ZIP_ROOT)/bin/
 	$(SOONG_ZIP) -o $@ -C $(PRIVATE_ZIP_ROOT) -D $(PRIVATE_ZIP_ROOT)
 
+$(call declare-1p-container,$(BUILT_OTATOOLS_PACKAGE),build)
+$(call declare-container-license-deps,$(BUILT_OTATOOLS_PACKAGE),$(INTERNAL_OTATOOLS_PACKAGE_FILES) $(INTERNAL_OTATOOLS_RELEASETOOLS),$(BUILT_OTATOOLS_PACKAGE):)
+
 .PHONY: otatools-package
 otatools-package: $(BUILT_OTATOOLS_PACKAGE)
 
@@ -5191,7 +5251,7 @@
 endif # BOARD_AVB_VBMETA_SYSTEM
 ifneq (,$(strip $(BOARD_AVB_VBMETA_VENDOR)))
 	$(hide) echo "avb_vbmeta_vendor=$(BOARD_AVB_VBMETA_VENDOR)" >> $@
-	$(hide) echo "avb_vbmeta_vendor_args=$(BOARD_AVB_MAKE_VBMETA_SYSTEM_IMAGE_ARGS)" >> $@
+	$(hide) echo "avb_vbmeta_vendor_args=$(BOARD_AVB_MAKE_VBMETA_VENDOR_IMAGE_ARGS)" >> $@
 	$(hide) echo "avb_vbmeta_vendor_key_path=$(BOARD_AVB_VBMETA_VENDOR_KEY_PATH)" >> $@
 	$(hide) echo "avb_vbmeta_vendor_algorithm=$(BOARD_AVB_VBMETA_VENDOR_ALGORITHM)" >> $@
 	$(hide) echo "avb_vbmeta_vendor_rollback_index_location=$(BOARD_AVB_VBMETA_VENDOR_ROLLBACK_INDEX_LOCATION)" >> $@
@@ -5318,10 +5378,12 @@
 tool_extension := $(wildcard $(tool_extensions)/releasetools.py)
 $(BUILT_TARGET_FILES_PACKAGE): PRIVATE_TOOL_EXTENSION := $(tool_extension)
 
-updaer_dep :=
+updater_dep :=
 ifeq ($(AB_OTA_UPDATER),true)
 updater_dep += system/update_engine/update_engine.conf
+$(call declare-1p-target,system/update_engine/update_engine.conf,system/update_engine)
 updater_dep += external/zucchini/version_info.h
+$(call declare-license-metadata,external/zucchini/version_info.h,legacy_notice,notice,external/zucchini/LICENSE,external/zucchini)
 updater_dep += $(HOST_OUT_SHARED_LIBRARIES)/liblz4.so
 endif
 
@@ -5650,10 +5712,8 @@
 	    $(TARGET_ROOT_OUT),$(zip_root)/ROOT)
 	@# If we are using recovery as boot, this is already done when processing recovery.
 ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
 	$(hide) $(call package_files-copy-root, \
 	    $(TARGET_RAMDISK_OUT),$(zip_root)/BOOT/RAMDISK)
-endif
 ifdef INSTALLED_KERNEL_TARGET
 	$(hide) cp $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/
 endif
@@ -5959,10 +6019,8 @@
 endif
 	@# ROOT always contains the files for the root under normal boot.
 	$(hide) $(call fs_config,$(zip_root)/ROOT,) > $(zip_root)/META/root_filesystem_config.txt
-ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-	@# BOOT/RAMDISK exists and contains the ramdisk for recovery if using BOARD_USES_RECOVERY_AS_BOOT.
+	@# BOOT/RAMDISK contains the first stage and recovery ramdisk.
 	$(hide) $(call fs_config,$(zip_root)/BOOT/RAMDISK,) > $(zip_root)/META/boot_filesystem_config.txt
-endif
 ifdef BUILDING_INIT_BOOT_IMAGE
 	$(hide) $(call package_files-copy-root, $(TARGET_RAMDISK_OUT),$(zip_root)/INIT_BOOT/RAMDISK)
 	$(hide) $(call fs_config,$(zip_root)/INIT_BOOT/RAMDISK,) > $(zip_root)/META/init_boot_filesystem_config.txt
@@ -5973,10 +6031,6 @@
 ifneq ($(INSTALLED_VENDOR_BOOTIMAGE_TARGET),)
 	$(call fs_config,$(zip_root)/VENDOR_BOOT/RAMDISK,) > $(zip_root)/META/vendor_boot_filesystem_config.txt
 endif
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-	@# BOOT/RAMDISK also exists and contains the first stage ramdisk if not using BOARD_BUILD_SYSTEM_ROOT_IMAGE.
-	$(hide) $(call fs_config,$(zip_root)/BOOT/RAMDISK,) > $(zip_root)/META/boot_filesystem_config.txt
-endif
 ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 	$(hide) $(call fs_config,$(zip_root)/RECOVERY/RAMDISK,) > $(zip_root)/META/recovery_filesystem_config.txt
 endif
@@ -6009,17 +6063,54 @@
 .PHONY: target-files-package
 target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
 
+$(call declare-1p-container,$(BUILT_TARGET_FILES_PACKAGE),)
+$(call declare-container-license-deps,$(BUILT_TARGET_FILES_PACKAGE), $(INSTALLED_RADIOIMAGE_TARGET) \
+            $(INSTALLED_RECOVERYIMAGE_TARGET) \
+            $(INSTALLED_CACHEIMAGE_TARGET) \
+            $(INSTALLED_DTBOIMAGE_TARGET) \
+            $(INSTALLED_PVMFWIMAGE_TARGET) \
+            $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET) \
+            $(INSTALLED_CUSTOMIMAGES_TARGET) \
+            $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
+            $(INSTALLED_KERNEL_TARGET) \
+            $(INSTALLED_RAMDISK_TARGET) \
+            $(INSTALLED_DTBIMAGE_TARGET) \
+            $(INSTALLED_2NDBOOTLOADER_TARGET) \
+            $(BOARD_PREBUILT_DTBOIMAGE) \
+            $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE) \
+            $(BOARD_RECOVERY_ACPIO) \
+            $(PRODUCT_SYSTEM_BASE_FS_PATH) \
+            $(PRODUCT_VENDOR_BASE_FS_PATH) \
+            $(PRODUCT_PRODUCT_BASE_FS_PATH) \
+            $(PRODUCT_SYSTEM_EXT_BASE_FS_PATH) \
+            $(PRODUCT_ODM_BASE_FS_PATH) \
+            $(PRODUCT_VENDOR_DLKM_BASE_FS_PATH) \
+            $(PRODUCT_ODM_DLKM_BASE_FS_PATH) \
+            $(PRODUCT_SYSTEM_DLKM_BASE_FS_PATH) \
+            $(LPMAKE) \
+            $(SELINUX_FC) \
+            $(INSTALLED_MISC_INFO_TARGET) \
+            $(APKCERTS_FILE) \
+            $(SOONG_APEX_KEYS_FILE) \
+            $(HOST_OUT_EXECUTABLES)/fs_config \
+            $(ADD_IMG_TO_TARGET_FILES) \
+            $(MAKE_RECOVERY_PATCH) \
+            $(BUILT_KERNEL_CONFIGS_FILE) \
+            $(BUILT_KERNEL_VERSION_FILE),$(BUILT_TARGET_FILES_PACKAGE):)
+
 $(call dist-for-goals, target-files-package, $(BUILT_TARGET_FILES_PACKAGE))
 
 # -----------------------------------------------------------------
 # NDK Sysroot Package
 NDK_SYSROOT_TARGET := $(PRODUCT_OUT)/ndk_sysroot.tar.bz2
+.PHONY: ndk_sysroot
+ndk_sysroot: $(NDK_SYSROOT_TARGET)
 $(NDK_SYSROOT_TARGET): $(SOONG_OUT_DIR)/ndk.timestamp
 	@echo Package NDK sysroot...
 	$(hide) tar cjf $@ -C $(SOONG_OUT_DIR) ndk
 
 ifeq ($(HOST_OS),linux)
-$(call dist-for-goals,sdk,$(NDK_SYSROOT_TARGET))
+$(call dist-for-goals,sdk ndk_sysroot,$(NDK_SYSROOT_TARGET))
 endif
 
 ifeq ($(build_ota_package),true)
@@ -6048,12 +6139,17 @@
 INTERNAL_OTA_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
 INTERNAL_OTA_METADATA := $(PRODUCT_OUT)/ota_metadata
 
+$(call declare-0p-target,$(INTERNAL_OTA_METADATA))
+
 $(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
 $(INTERNAL_OTA_PACKAGE_TARGET): .KATI_IMPLICIT_OUTPUTS := $(INTERNAL_OTA_METADATA)
 $(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
 	@echo "Package OTA: $@"
 	$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --output_metadata_path $(INTERNAL_OTA_METADATA))
 
+$(call declare-1p-container,$(INTERNAL_OTA_PACKAGE_TARGET),)
+$(call declare-container-license-deps,$(INTERNAL_OTA_PACKAGE_TARGET),$(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES),$(PRODUCT_OUT)/:/)
+
 .PHONY: otapackage
 otapackage: $(INTERNAL_OTA_PACKAGE_TARGET)
 
@@ -6069,6 +6165,9 @@
 	@echo "Package OTA (retrofit dynamic partitions): $@"
 	$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --retrofit_dynamic_partitions)
 
+$(call declare-1p-container,$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET),)
+$(call declare-container-license-deps,$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET),$(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES),$(PRODUCT_OUT)/:/)
+
 .PHONY: otardppackage
 
 otapackage otardppackage: $(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET)
@@ -6084,6 +6183,10 @@
 	@echo "Package partial OTA: $@"
 	$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --partial "$(BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST)")
 
+$(call declare-1p-container,$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET),)
+$(call declare-container-license-deps,$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET),$(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES),$(PRODUCT_OUT)/:/)
+
+
 .PHONY: partialotapackage
 partialotapackage: $(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET)
 
@@ -6114,7 +6217,7 @@
 # The mac build doesn't build dex2oat, so create the zip file only if the build OS is linux.
 ifeq ($(BUILD_OS),linux)
 ifneq ($(DEX2OAT),)
-dexpreopt_tools_deps := $(DEXPREOPT_GEN_DEPS) $(DEXPREOPT_GEN) $(AAPT2)
+dexpreopt_tools_deps := $(DEXPREOPT_GEN_DEPS) $(DEXPREOPT_GEN)
 dexpreopt_tools_deps += $(HOST_OUT_EXECUTABLES)/dexdump
 dexpreopt_tools_deps += $(HOST_OUT_EXECUTABLES)/oatdump
 DEXPREOPT_TOOLS_ZIP := $(PRODUCT_OUT)/dexpreopt_tools.zip
@@ -6123,6 +6226,7 @@
 $(DEXPREOPT_TOOLS_ZIP): $(SOONG_ZIP)
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(SOONG_ZIP) -d -o $@ -j $(addprefix -f ,$(PRIVATE_DEXPREOPT_TOOLS_DEPS)) -f $$(realpath $(DEX2OAT))
+$(call declare-1p-target,$(DEXPREOPT_TOOLS_ZIP),)
 endif # DEX2OAT is set
 endif # BUILD_OS == linux
 
@@ -6155,28 +6259,49 @@
 .PHONY: dexpreopt_config_zip
 dexpreopt_config_zip: $(DEXPREOPT_CONFIG_ZIP)
 
+$(call declare-1p-target,$(DEXPREOPT_CONFIG_ZIP),)
+
 # -----------------------------------------------------------------
 # A zip of the symbols directory.  Keep the full paths to make it
 # more obvious where these files came from.
+# Also produces a textproto containing mappings from elf IDs to symbols
+# filename, which will allow finding the appropriate symbols to deobfuscate
+# a stack trace frame.
 #
+
 name := $(TARGET_PRODUCT)
 ifeq ($(TARGET_BUILD_TYPE),debug)
   name := $(name)_debug
 endif
-name := $(name)-symbols-$(FILE_NAME_TAG)
 
-SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name).zip
+# The path to the zip file containing binaries with symbols.
+SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name)-symbols-$(FILE_NAME_TAG).zip
+# The path to a file containing mappings from elf IDs to filenames.
+SYMBOLS_MAPPING := $(PRODUCT_OUT)/$(name)-symbols-mapping-$(FILE_NAME_TAG).textproto
+.KATI_READONLY := SYMBOLS_ZIP SYMBOLS_MAPPING
 # For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 $(SYMBOLS_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
 endif
 $(SYMBOLS_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,symbols)/filelist
-$(SYMBOLS_ZIP): $(SOONG_ZIP)
+$(SYMBOLS_ZIP): PRIVATE_MAPPING_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,elf_symbol_mapping)
+$(SYMBOLS_ZIP): $(SOONG_ZIP) $(SYMBOLS_MAP)
 	@echo "Package symbols: $@"
 	$(hide) rm -rf $@ $(PRIVATE_LIST_FILE)
-	$(hide) mkdir -p $(dir $@) $(TARGET_OUT_UNSTRIPPED) $(dir $(PRIVATE_LIST_FILE))
+	$(hide) mkdir -p $(TARGET_OUT_UNSTRIPPED) $(dir $(PRIVATE_LIST_FILE)) $(PRIVATE_MAPPING_PACKAGING_DIR)
+	# Find all of the files in the symbols directory and zip them into the symbols zip.
 	$(hide) find -L $(TARGET_OUT_UNSTRIPPED) -type f | sort >$(PRIVATE_LIST_FILE)
 	$(hide) $(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(OUT_DIR)/.. -l $(PRIVATE_LIST_FILE)
+	# Find all of the files in the symbols mapping directory and merge them into the symbols mapping textproto.
+	$(hide) find -L $(PRIVATE_MAPPING_PACKAGING_DIR) -type f | sort >$(PRIVATE_LIST_FILE)
+	$(hide) $(SYMBOLS_MAP) -merge $(SYMBOLS_MAPPING) -ignore_missing_files @$(PRIVATE_LIST_FILE)
+$(SYMBOLS_ZIP): .KATI_IMPLICIT_OUTPUTS := $(SYMBOLS_MAPPING)
+
+$(call declare-1p-container,$(SYMBOLS_ZIP),)
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+$(call declare-container-license-deps,$(SYMBOLS_ZIP),$(INTERNAL_ALLIMAGES_FILES) $(updater_dep),$(PRODUCT_OUT)/:/)
+endif
+
 # -----------------------------------------------------------------
 # A zip of the coverage directory.
 #
@@ -6197,7 +6322,9 @@
 	$(hide) $(SOONG_ZIP) -d -o $@ -C $(TARGET_OUT_COVERAGE) -l $(PRIVATE_LIST_FILE)
 
 $(call declare-1p-container,$(COVERAGE_ZIP),)
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
 $(call declare-container-license-deps,$(COVERAGE_ZIP),$(INTERNAL_ALLIMAGES_FILES),$(PRODUCT_OUT)/:/)
+endif
 
 SYSTEM_NOTICE_DEPS += $(COVERAGE_ZIP)
 
@@ -6207,7 +6334,7 @@
 ifeq (true,$(CLANG_COVERAGE))
   LLVM_PROFDATA := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-profdata
   LLVM_COV := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-cov
-  LIBCXX := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib64/libc++.so.1
+  LIBCXX := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib/x86_64-unknown-linux-gnu/libc++.so.1
   # Use llvm-profdata.zip for backwards compatibility with tradefed code.
   LLVM_COVERAGE_TOOLS_ZIP := $(PRODUCT_OUT)/llvm-profdata.zip
 
@@ -6269,17 +6396,40 @@
 
 #------------------------------------------------------------------
 # A zip of Proguard obfuscation dictionary files.
+# Also produces a textproto containing mappings from the hashes of the
+# dictionary contents (which are also stored in the dex files on the
+# devices) to the filename of the proguard dictionary, which will allow
+# finding the appropriate dictionary to deobfuscate a stack trace frame.
 #
+
+# The path to the zip file containing proguard dictionaries.
 PROGUARD_DICT_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-$(FILE_NAME_TAG).zip
+# The path to the zip file containing mappings from dictionary hashes to filenames.
+PROGUARD_DICT_MAPPING := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-mapping-$(FILE_NAME_TAG).textproto
+.KATI_READONLY := PROGUARD_DICT_ZIP PROGUARD_DICT_MAPPING
 # For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 $(PROGUARD_DICT_ZIP): $(INTERNAL_ALLIMAGES_FILES) $(updater_dep)
 endif
 $(PROGUARD_DICT_ZIP): PRIVATE_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,proguard_dictionary)
-$(PROGUARD_DICT_ZIP): $(SOONG_ZIP)
+$(PROGUARD_DICT_ZIP): PRIVATE_MAPPING_PACKAGING_DIR := $(call intermediates-dir-for,PACKAGING,proguard_dictionary_mapping)
+$(PROGUARD_DICT_ZIP): PRIVATE_LIST_FILE := $(call intermediates-dir-for,PACKAGING,proguard_dictionary_filelist)/filelist
+$(PROGUARD_DICT_ZIP): $(SOONG_ZIP) $(SYMBOLS_MAP)
 	@echo "Packaging Proguard obfuscation dictionary files."
-	mkdir -p $(dir $@) $(PRIVATE_PACKAGING_DIR)
-	$(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(PRIVATE_PACKAGING_DIR) -P out/target/common/obj -D $(PRIVATE_PACKAGING_DIR)
+	rm -rf $@ $(PRIVATE_LIST_FILE)
+	mkdir -p $(PRIVATE_PACKAGING_DIR) $(PRIVATE_MAPPING_PACKAGING_DIR) $(dir $(PRIVATE_LIST_FILE))
+	# Zip all of the files in the proguard dictionary directory.
+	$(SOONG_ZIP) --ignore_missing_files -d -o $@ -C $(PRIVATE_PACKAGING_DIR) -D $(PRIVATE_PACKAGING_DIR)
+	# Find all of the files in the proguard dictionary mapping directory and merge them into the mapping textproto.
+	# Strip the PRIVATE_PACKAGING_DIR off the filenames to match soong_zip's -C argument.
+	$(hide) find -L $(PRIVATE_MAPPING_PACKAGING_DIR) -type f | sort >$(PRIVATE_LIST_FILE)
+	$(SYMBOLS_MAP) -merge $(PROGUARD_DICT_MAPPING) -strip_prefix $(PRIVATE_PACKAGING_DIR)/ -ignore_missing_files @$(PRIVATE_LIST_FILE)
+$(PROGUARD_DICT_ZIP): .KATI_IMPLICIT_OUTPUTS := $(PROGUARD_DICT_MAPPING)
+
+$(call declare-1p-container,$(PROGUARD_DICT_ZIP),)
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+$(call declare-container-license-deps,$(PROGUARD_DICT_ZIP),$(INTERNAL_ALLIMAGES_FILES) $(updater_dep),$(PRODUCT_OUT)/:/)
+endif
 
 #------------------------------------------------------------------
 # A zip of Proguard usage files.
@@ -6310,6 +6460,23 @@
 	find $(PRIVATE_PACKAGING_DIR) -name proguard_usage.zip > $(PRIVATE_LIST_FILE)
 	$(MERGE_ZIPS) $@ @$(PRIVATE_LIST_FILE)
 
+$(call declare-1p-container,$(PROGUARD_USAGE_ZIP),)
+ifeq (,$(TARGET_BUILD_UNBUNDLED))
+$(call declare-container-license-deps,$(PROGUARD_USAGE_ZIP),$(INSTALLED_SYSTEMIMAGE_TARGET) \
+    $(INSTALLED_RAMDISK_TARGET) \
+    $(INSTALLED_BOOTIMAGE_TARGET) \
+    $(INSTALLED_INIT_BOOT_IMAGE_TARGET) \
+    $(INSTALLED_USERDATAIMAGE_TARGET) \
+    $(INSTALLED_VENDORIMAGE_TARGET) \
+    $(INSTALLED_PRODUCTIMAGE_TARGET) \
+    $(INSTALLED_SYSTEM_EXTIMAGE_TARGET) \
+    $(INSTALLED_ODMIMAGE_TARGET) \
+    $(INSTALLED_VENDOR_DLKMIMAGE_TARGET) \
+    $(INSTALLED_ODM_DLKMIMAGE_TARGET) \
+    $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET) \
+    $(updater_dep),$(PROGUARD_USAGE_ZIP):/)
+endif
+
 ifeq (true,$(PRODUCT_USE_DYNAMIC_PARTITIONS))
 
 # Dump variables used by build_super_image.py (for building super.img and super_empty.img).
@@ -6385,22 +6552,22 @@
 endif
 endif
 
-# If BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT is set, super.img is built from images in the
-# $(PRODUCT_OUT) directory, and is built to $(PRODUCT_OUT)/super.img. Also, it will
-# be built for non-dist builds. This is useful for devices that uses super.img directly, e.g.
-# virtual devices.
-ifeq (true,$(BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT))
 $(INSTALLED_SUPERIMAGE_TARGET): $(INSTALLED_SUPERIMAGE_DEPENDENCIES)
 	$(call pretty,"Target super fs image for debug: $@")
 	$(call build-superimage-target,$(INSTALLED_SUPERIMAGE_TARGET),\
           $(call intermediates-dir-for,PACKAGING,superimage_debug)/misc_info.txt)
 
-droidcore-unbundled: $(INSTALLED_SUPERIMAGE_TARGET)
-
 # For devices that use super image directly, the superimage target points to the file in $(PRODUCT_OUT).
 .PHONY: superimage
 superimage: $(INSTALLED_SUPERIMAGE_TARGET)
 
+# If BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT is set, super.img is built from images in the
+# $(PRODUCT_OUT) directory, and is built to $(PRODUCT_OUT)/super.img. Also, it will
+# be built for non-dist builds. This is useful for devices that use super.img directly, e.g.
+# virtual devices.
+ifeq (true,$(BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT))
+droidcore-unbundled: $(INSTALLED_SUPERIMAGE_TARGET)
+
 $(call dist-for-goals,dist_files,$(INSTALLED_MISC_INFO_TARGET):super_misc_info.txt)
 endif # BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT
 
@@ -6455,6 +6622,9 @@
 	        --additional IMAGES/VerifiedBootParams.textproto:VerifiedBootParams.textproto \
 	        $(BUILT_TARGET_FILES_PACKAGE) $@
 
+$(call declare-1p-container,$(INTERNAL_UPDATE_PACKAGE_TARGET),)
+$(call declare-container-license-deps,$(INTERNAL_UPDATE_PACKAGE_TARGET),$(BUILT_TARGET_FILES_PACKAGE) $(IMG_FROM_TARGET_FILES),$(PRODUCT_OUT)/:/)
+
 .PHONY: updatepackage
 updatepackage: $(INTERNAL_UPDATE_PACKAGE_TARGET)
 $(call dist-for-goals,updatepackage,$(INTERNAL_UPDATE_PACKAGE_TARGET))
@@ -6626,8 +6796,6 @@
 # if we don't have a real list, then use "everything"
 ifeq ($(strip $(ATREE_FILES)),)
 ATREE_FILES := \
-	$(ALL_DEFAULT_INSTALLED_MODULES) \
-	$(INSTALLED_RAMDISK_TARGET) \
 	$(ALL_DOCS) \
 	$(ALL_SDK_FILES)
 endif
@@ -6651,36 +6819,35 @@
 sdk_atree_files += $(atree_dir)/sdk.atree
 endif
 
-include $(BUILD_SYSTEM)/sdk_font.mk
-
 deps := \
-	$(target_notice_file_txt) \
 	$(OUT_DOCS)/offline-sdk-timestamp \
 	$(SDK_METADATA_FILES) \
-	$(SYMBOLS_ZIP) \
-	$(COVERAGE_ZIP) \
-	$(APPCOMPAT_ZIP) \
-	$(INSTALLED_SYSTEMIMAGE_TARGET) \
-	$(INSTALLED_QEMU_SYSTEMIMAGE) \
-	$(INSTALLED_QEMU_RAMDISKIMAGE) \
-	$(INSTALLED_QEMU_VENDORIMAGE) \
-	$(QEMU_VERIFIED_BOOT_PARAMS) \
-	$(INSTALLED_USERDATAIMAGE_TARGET) \
-	$(INSTALLED_RAMDISK_TARGET) \
-	$(INSTALLED_SDK_BUILD_PROP_TARGET) \
-	$(INSTALLED_BUILD_PROP_TARGET) \
+	$(INSTALLED_SDK_BUILD_PROP_TARGET) \
 	$(ATREE_FILES) \
 	$(sdk_atree_files) \
 	$(HOST_OUT_EXECUTABLES)/atree \
-	$(HOST_OUT_EXECUTABLES)/line_endings \
-	$(SDK_FONT_DEPS)
+	$(HOST_OUT_EXECUTABLES)/line_endings
+
+# The name of the subdir within the platforms dir of the sdk. One of:
+# - android-<SDK_INT> (stable base dessert SDKs)
+# - android-<CODENAME> (codename SDKs)
+# - android-<SDK_INT>-ext<EXT_INT> (stable extension SDKs)
+sdk_platform_dir_name := $(strip \
+  $(if $(filter REL,$(PLATFORM_VERSION_CODENAME)), \
+    $(if $(filter $(PLATFORM_SDK_EXTENSION_VERSION),$(PLATFORM_BASE_SDK_EXTENSION_VERSION)), \
+      android-$(PLATFORM_SDK_VERSION), \
+      android-$(PLATFORM_SDK_VERSION)-ext$(PLATFORM_SDK_EXTENSION_VERSION) \
+    ), \
+    android-$(PLATFORM_VERSION_CODENAME) \
+  ) \
+)
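+# For illustration (hypothetical values): REL with PLATFORM_SDK_VERSION=33 and
+# a matching base extension yields "android-33"; REL with extension version 4
+# beyond the base yields "android-33-ext4"; a codename like UpsideDownCake
+# yields "android-UpsideDownCake".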
 
 INTERNAL_SDK_TARGET := $(sdk_dir)/$(sdk_name).zip
 $(INTERNAL_SDK_TARGET): PRIVATE_NAME := $(sdk_name)
 $(INTERNAL_SDK_TARGET): PRIVATE_DIR := $(sdk_dir)/$(sdk_name)
 $(INTERNAL_SDK_TARGET): PRIVATE_DEP_FILE := $(sdk_dep_file)
 $(INTERNAL_SDK_TARGET): PRIVATE_INPUT_FILES := $(sdk_atree_files)
-
+$(INTERNAL_SDK_TARGET): PRIVATE_PLATFORM_NAME := $(sdk_platform_dir_name)
 # Set SDK_GNU_ERROR to non-empty to fail when a GNU target is built.
 #
 #SDK_GNU_ERROR := true
@@ -6696,7 +6863,6 @@
 	  fi; \
 	done; \
 	if [ $$FAIL ]; then exit 1; fi
-	$(hide) echo $(notdir $(SDK_FONT_DEPS)) | tr " " "\n"  > $(SDK_FONT_TEMP)/fontsInSdk.txt
 	$(hide) ( \
 	    ATREE_STRIP="$(HOST_STRIP) -x" \
 	    $(HOST_OUT_EXECUTABLES)/atree \
@@ -6706,16 +6872,13 @@
 	        -I $(PRODUCT_OUT) \
 	        -I $(HOST_OUT) \
 	        -I $(TARGET_COMMON_OUT_ROOT) \
-	        -v "PLATFORM_NAME=android-$(PLATFORM_VERSION)" \
+	        -v "PLATFORM_NAME=$(PRIVATE_PLATFORM_NAME)" \
 	        -v "OUT_DIR=$(OUT_DIR)" \
 	        -v "HOST_OUT=$(HOST_OUT)" \
 	        -v "TARGET_ARCH=$(TARGET_ARCH)" \
 	        -v "TARGET_CPU_ABI=$(TARGET_CPU_ABI)" \
 	        -v "DLL_EXTENSION=$(HOST_SHLIB_SUFFIX)" \
-	        -v "FONT_OUT=$(SDK_FONT_TEMP)" \
 	        -o $(PRIVATE_DIR) && \
-	    cp -f $(target_notice_file_txt) \
-	            $(PRIVATE_DIR)/system-images/android-$(PLATFORM_VERSION)/$(TARGET_CPU_ABI)/NOTICE.txt && \
 	    HOST_OUT_EXECUTABLES=$(HOST_OUT_EXECUTABLES) HOST_OS=$(HOST_OS) \
 	        development/build/tools/sdk_clean.sh $(PRIVATE_DIR) && \
 	    chmod -R ug+rwX $(PRIVATE_DIR) && \
@@ -6794,14 +6957,26 @@
 .PHONY: haiku
 haiku: $(SOONG_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_FUZZ_TARGETS)
 $(call dist-for-goals,haiku,$(SOONG_FUZZ_PACKAGING_ARCH_MODULES))
+$(call dist-for-goals,haiku,$(PRODUCT_OUT)/module-info.json)
 
-.PHONY: haiku-java
-haiku-java: $(SOONG_JAVA_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_TARGETS)
-$(call dist-for-goals,haiku-java,$(SOONG_JAVA_FUZZ_PACKAGING_ARCH_MODULES))
+.PHONY: haiku-java-device
+haiku-java-device: $(SOONG_JAVA_FUZZ_DEVICE_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_DEVICE_TARGETS)
+$(call dist-for-goals,haiku-java-device,$(SOONG_JAVA_FUZZ_DEVICE_PACKAGING_ARCH_MODULES))
+$(call dist-for-goals,haiku-java-device,$(PRODUCT_OUT)/module-info.json)
+
+.PHONY: haiku-java-host
+haiku-java-host: $(SOONG_JAVA_FUZZ_HOST_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_HOST_TARGETS)
+$(call dist-for-goals,haiku-java-host,$(SOONG_JAVA_FUZZ_HOST_PACKAGING_ARCH_MODULES))
+$(call dist-for-goals,haiku-java-host,$(PRODUCT_OUT)/module-info.json)
 
 .PHONY: haiku-rust
 haiku-rust: $(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_RUST_FUZZ_TARGETS)
 $(call dist-for-goals,haiku-rust,$(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES))
+$(call dist-for-goals,haiku-rust,$(PRODUCT_OUT)/module-info.json)
+
+# -----------------------------------------------------------------
+# Extract platform fonts used in Layoutlib
+include $(BUILD_SYSTEM)/layoutlib_fonts.mk
 
 
 # -----------------------------------------------------------------
diff --git a/core/OWNERS b/core/OWNERS
index 8794434..d48ceab 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,6 +1,9 @@
-per-file dex_preopt*.mk = ngeoffray@google.com,calin@google.com,mathewi@google.com,skvadrik@google.com
-per-file verify_uses_libraries.sh = ngeoffray@google.com,calin@google.com,skvadrik@google.com
+per-file *dex_preopt*.* = ngeoffray@google.com,skvadrik@google.com
+per-file verify_uses_libraries.sh = ngeoffray@google.com,skvadrik@google.com
+
+# For global Proguard rules
+per-file proguard*.flags = jdduke@google.com
 
 # For version updates
-per-file version_defaults.mk = aseaton@google.com,elisapascual@google.com,lubomir@google.com,pscovanner@google.com
+per-file version_defaults.mk = aseaton@google.com,lubomir@google.com,pscovanner@google.com,bkhalife@google.com,jainne@google.com
 
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index 254e09b..ff49262 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -87,13 +87,23 @@
   endif
 endif
 
+# TODO: Replace this hardcoded list of optional uses-libraries with build logic
+# that propagates optionality via the generated exported-sdk-libs files.
+# Hardcoding doesn't scale and enforces a single choice on each library, while in
+# reality this is a choice of the library users (which may differ).
+my_optional_sdk_lib_names := \
+    android.test.base \
+    android.test.mock \
+    androidx.window.extensions \
+    androidx.window.sidecar
+
 $(fixed_android_manifest): PRIVATE_MANIFEST_FIXER_FLAGS := $(my_manifest_fixer_flags)
 # These libs are added as optional dependencies (<uses-library> with
 # android:required set to false). This is because they haven't existed in pre-P
 # devices, but classes in them were in bootclasspath jars, etc. So making them
 # hard dependencies (android:required=true) would prevent apps from being
 # installed to such legacy devices.
-$(fixed_android_manifest): PRIVATE_OPTIONAL_SDK_LIB_NAMES := android.test.base android.test.mock
+$(fixed_android_manifest): PRIVATE_OPTIONAL_SDK_LIB_NAMES := $(my_optional_sdk_lib_names)
 $(fixed_android_manifest): $(MANIFEST_FIXER)
 $(fixed_android_manifest): $(main_android_manifest)
 	echo $(PRIVATE_OPTIONAL_SDK_LIB_NAMES) | tr ' ' '\n' > $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional
@@ -109,3 +119,5 @@
 	   ) \
 	  $< $@
 	rm $(PRIVATE_EXPORTED_SDK_LIBS_FILE).optional
+
+my_optional_sdk_lib_names :=
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 6c32da4..9f305cf 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -26,6 +26,7 @@
 
 # Add variables to the namespace below:
 
+$(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_MEDIASERVER)
 $(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
 $(call add_soong_config_var,ANDROID,IS_TARGET_MIXED_SEPOLICY)
 ifeq ($(IS_TARGET_MIXED_SEPOLICY),true)
@@ -33,7 +34,6 @@
 endif
 $(call add_soong_config_var,ANDROID,BOARD_USES_ODMIMAGE)
 $(call add_soong_config_var,ANDROID,BOARD_USES_RECOVERY_AS_BOOT)
-$(call add_soong_config_var,ANDROID,BOARD_BUILD_SYSTEM_ROOT_IMAGE)
 $(call add_soong_config_var,ANDROID,PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT)
 
 # Default behavior for the tree wrt building modules or using prebuilts. This
@@ -43,13 +43,13 @@
 
 ifneq (,$(MODULE_BUILD_FROM_SOURCE))
   # Keep an explicit setting.
-else ifeq (,$(filter sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES)))
+else ifeq (,$(filter docs sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES)))
   # Prebuilt module SDKs require prebuilt modules to work, and currently
   # prebuilt modules are only provided for com.google.android.xxx. If we can't
   # find one of them in PRODUCT_PACKAGES then assume com.android.xxx are in use,
   # and disable prebuilt SDKs. In particular this applies to AOSP builds.
   #
-  # However, sdk/win_sdk/sdk_addon builds might not include com.google.android.xxx
+  # However, docs/sdk/win_sdk/sdk_addon builds might not include com.google.android.xxx
   # packages, so for those we respect the default behavior.
   MODULE_BUILD_FROM_SOURCE := true
 else ifneq (,$(PRODUCT_MODULE_BUILD_FROM_SOURCE))
@@ -71,6 +71,19 @@
 
 $(call soong_config_set,art_module,source_build,$(ART_MODULE_BUILD_FROM_SOURCE))
 
+# Ensure that mainline modules that have individually toggleable prebuilts
+# are controlled by the MODULE_BUILD_FROM_SOURCE environment variable by
+# default.
+INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES := \
+  bluetooth \
+  permission \
+  uwb \
+  wifi \
+
+$(foreach m, $(INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES),\
+  $(if $(call soong_config_get,$(m)_module,source_build),,\
+    $(call soong_config_set,$(m)_module,source_build,$(MODULE_BUILD_FROM_SOURCE))))
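+# After this, e.g. $(call soong_config_get,wifi_module,source_build) returns
+# the value of MODULE_BUILD_FROM_SOURCE unless a product set it explicitly
+# beforehand.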
+
 # Apex build mode variables
 ifdef APEX_BUILD_FOR_PRE_S_DEVICES
 $(call add_soong_config_var_value,ANDROID,library_linking_strategy,prefer_static)
@@ -88,6 +101,28 @@
 # TODO(b/203088572): Remove when Java optimizations enabled by default for
 # SystemUI.
 $(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA)
-# TODO(b/196084106): Remove when Java optimizations enabled by default for
-# system packages.
+
+# Enable system_server optimizations by default unless explicitly set or if
+# there may be dependent runtime jars.
+# TODO(b/240588226): Remove the off-by-default exceptions after handling
+# system_server jars automatically w/ R8.
+ifeq (true,$(PRODUCT_BROKEN_SUBOPTIMAL_ORDER_OF_SYSTEM_SERVER_JARS))
+  # If system_server jar ordering is broken, don't assume services.jar can be
+  # safely optimized in isolation, as there may be dependent jars.
+  SYSTEM_OPTIMIZE_JAVA ?= false
+else ifneq (platform:services,$(lastword $(PRODUCT_SYSTEM_SERVER_JARS)))
+  # If services is not the final jar in the dependency ordering, don't assume
+  # it can be safely optimized in isolation, as there may be dependent jars.
+  SYSTEM_OPTIMIZE_JAVA ?= false
+else
+  SYSTEM_OPTIMIZE_JAVA ?= true
+endif
 $(call add_soong_config_var,ANDROID,SYSTEM_OPTIMIZE_JAVA)
+
+# Check for SupplementalApi module.
+ifeq ($(wildcard packages/modules/SupplementalApi),)
+$(call add_soong_config_var_value,ANDROID,include_nonpublic_framework_api,false)
+else
+$(call add_soong_config_var_value,ANDROID,include_nonpublic_framework_api,true)
+endif
+
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index 79639a8..eb429cd 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -128,6 +128,9 @@
     LOCAL_CERTIFICATE := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))$(LOCAL_CERTIFICATE)
   endif
 
+  # NOTE(ruperts): Consider moving the logic below out of a conditional,
+  # to avoid the possibility of silently ignoring user settings.
+
   PACKAGES.$(LOCAL_MODULE).PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
   PACKAGES.$(LOCAL_MODULE).CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
   PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
@@ -142,6 +145,8 @@
 
   $(built_module): $(LOCAL_CERTIFICATE_LINEAGE)
   $(built_module): PRIVATE_CERTIFICATE_LINEAGE := $(LOCAL_CERTIFICATE_LINEAGE)
+
+  $(built_module): PRIVATE_ROTATION_MIN_SDK_VERSION := $(LOCAL_ROTATION_MIN_SDK_VERSION)
 endif
 
 ifneq ($(LOCAL_MODULE_STEM),)
@@ -275,7 +280,7 @@
 endif
 my_src_dir := $(LOCAL_PATH)/$(my_src_dir)
 
-$(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem
+$(built_apk_splits) : $(LOCAL_CERTIFICATE).pk8 $(LOCAL_CERTIFICATE).x509.pem | $(ZIPALIGN) $(ZIP2ZIP) $(SIGNAPK_JAR) $(SIGNAPK_JNI_LIBRARY_PATH)
 $(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
 $(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
 $(built_apk_splits) : $(intermediates)/%.apk : $(my_src_dir)/%.apk
diff --git a/core/artifact_path_requirements.mk b/core/artifact_path_requirements.mk
index ceaefa2..566b9f7 100644
--- a/core/artifact_path_requirements.mk
+++ b/core/artifact_path_requirements.mk
@@ -22,6 +22,10 @@
     $(TARGET_OUT_SYSTEM_OTHER)/%.art
 endif
 
+ifneq (,$(filter-out true false relaxed strict,$(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS))$(filter-out 1 0,$(words $(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS))))
+  $(error PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS must be one of [true, false, relaxed, strict], found: $(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS))
+endif
+
 all_offending_files :=
 $(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
   $(eval requirements := $(PRODUCTS.$(makefile).ARTIFACT_PATH_REQUIREMENTS)) \
@@ -46,7 +50,7 @@
   $(eval allowed_patterns := $(call resolve-product-relative-paths,$(allowed))) \
   $(eval offending_files := $(filter-out $(allowed_patterns),$(files_in_requirement))) \
   $(eval enforcement := $(PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS)) \
-  $(if $(enforcement),\
+  $(if $(filter-out false,$(enforcement)),\
     $(call maybe-print-list-and-error,$(offending_files),\
       $(INTERNAL_PRODUCT) produces files inside $(makefile)s artifact path requirement. \
       $(PRODUCT_ARTIFACT_PATH_REQUIREMENT_HINT)) \
diff --git a/core/base_rules.mk b/core/base_rules.mk
index c01cde8..00f5f21 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -20,7 +20,11 @@
 # Users can define base-rules-hook in their buildspec.mk to perform
 # arbitrary operations as each module is included.
 ifdef base-rules-hook
-$(if $(base-rules-hook),)
+  ifndef _has_warned_about_base_rules_hook
+    $(warning base-rules-hook is deprecated, please remove usages of it and/or convert to Soong.)
+    _has_warned_about_base_rules_hook := true
+  endif
+  $(if $(base-rules-hook),)
 endif
 
 ###########################################################
@@ -590,10 +594,18 @@
       my_init_rc := $(foreach rc,$(LOCAL_INIT_RC_$(my_32_64_bit_suffix)) $(LOCAL_INIT_RC),$(LOCAL_PATH)/$(rc))
     endif
     ifneq ($(strip $(my_init_rc)),)
-      # Make doesn't support recovery as an output partition, but some Soong modules installed in recovery
-      # have init.rc files that need to be installed alongside them. Manually handle the case where the
-      # output file is in the recovery partition.
-      my_init_rc_path := $(if $(filter $(TARGET_RECOVERY_ROOT_OUT)/%,$(my_module_path)),$(TARGET_RECOVERY_ROOT_OUT)/system/etc,$(TARGET_OUT$(partition_tag)_ETC))
+      # Make doesn't support recovery or ramdisk as an output partition,
+      # but some Soong modules installed in recovery or ramdisk
+      # have init.rc files that need to be installed alongside them.
+      # Manually handle the case where the
+      # output file is in the recovery or ramdisk partition.
+      ifneq (,$(filter $(TARGET_RECOVERY_ROOT_OUT)/%,$(my_module_path)))
+        my_init_rc_path := $(TARGET_RECOVERY_ROOT_OUT)/system/etc
+      else ifneq (,$(filter $(TARGET_RAMDISK_OUT)/%,$(my_module_path)))
+        my_init_rc_path := $(TARGET_RAMDISK_OUT)/system/etc
+      else
+        my_init_rc_path := $(TARGET_OUT$(partition_tag)_ETC)
+      endif
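+      # e.g. an init.rc for a module installed under $(TARGET_RAMDISK_OUT)
+      # is installed to $(TARGET_RAMDISK_OUT)/system/etc/init/ by the pairs
+      # built below.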
       my_init_rc_pairs := $(foreach rc,$(my_init_rc),$(rc):$(my_init_rc_path)/init/$(notdir $(rc)))
       my_init_rc_installed := $(foreach rc,$(my_init_rc_pairs),$(call word-colon,2,$(rc)))
 
@@ -713,6 +725,11 @@
 endif
 ifdef LOCAL_MULTILIB
   multi_arch := true
+# These conditionals allow this functionality to be mimicked in Soong
+else ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+  ifeq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
+    multi_arch := true
+  endif
 endif
 
 ifdef multi_arch
@@ -995,7 +1012,11 @@
     $(ALL_MODULES.$(my_register_name).SYSTEM_SHARED_LIBS) $(LOCAL_SYSTEM_SHARED_LIBRARIES)
 
 ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES := \
-    $(ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES) $(LOCAL_RUNTIME_LIBRARIES)
+    $(ALL_MODULES.$(my_register_name).LOCAL_RUNTIME_LIBRARIES) $(LOCAL_RUNTIME_LIBRARIES) \
+    $(LOCAL_JAVA_LIBRARIES)
+
+ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES := \
+    $(ALL_MODULES.$(my_register_name).LOCAL_STATIC_LIBRARIES) $(LOCAL_STATIC_JAVA_LIBRARIES)
 
 ifdef LOCAL_TEST_DATA
   # Export the list of targets that are handled as data inputs and required
@@ -1019,6 +1040,24 @@
   $(filter-out $(ALL_MODULES.$(my_register_name).SUPPORTED_VARIANTS),$(my_supported_variant))
 
 ##########################################################################
+## When compiling against an API-imported module, use API import stub
+## libraries.
+##########################################################################
+ifneq ($(LOCAL_USE_VNDK),)
+  ifneq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+    apiimport_postfix := .apiimport
+    ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+      apiimport_postfix := .apiimport.product
+    else
+      apiimport_postfix := .apiimport.vendor
+    endif
+
+    my_required_modules := $(foreach l,$(my_required_modules), \
+      $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+  endif
+endif
+
+##########################################################################
 ## When compiling against the VNDK, add the .vendor or .product suffix to
 ## required modules.
 ##########################################################################
@@ -1104,6 +1143,9 @@
 ifdef LOCAL_IS_UNIT_TEST
 ALL_MODULES.$(my_register_name).IS_UNIT_TEST := $(LOCAL_IS_UNIT_TEST)
 endif
+ifdef LOCAL_TEST_OPTIONS_TAGS
+ALL_MODULES.$(my_register_name).TEST_OPTIONS_TAGS := $(LOCAL_TEST_OPTIONS_TAGS)
+endif
 test_config :=
 
 INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
diff --git a/core/binary.mk b/core/binary.mk
index 665270e..1ad9be8 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -1145,6 +1145,28 @@
     $(my_static_libraries),hwasan)
 endif
 
+###########################################################
+## When compiling against an API-imported module, use API
+## import stub libraries.
+###########################################################
+
+apiimport_postfix := .apiimport
+
+ifneq ($(LOCAL_USE_VNDK),)
+  ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+    apiimport_postfix := .apiimport.product
+  else
+    apiimport_postfix := .apiimport.vendor
+  endif
+endif
+
+my_shared_libraries := $(foreach l,$(my_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+my_header_libraries := $(foreach l,$(my_header_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_HEADER_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
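+# e.g. a hypothetical libfoo listed in API_IMPORTED_SHARED_LIBRARIES is
+# rewritten to libfoo.apiimport.vendor or libfoo.apiimport.product here
+# (plain libfoo.apiimport for non-VNDK modules).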
+
 ###########################################################
 ## When compiling against the VNDK, use LL-NDK libraries
 ###########################################################
@@ -1506,7 +1528,7 @@
         ifeq (,$(strip $(call find_warning_allowed_projects,$(LOCAL_PATH))))
           my_cflags := -Wall -Werror $(my_cflags)
         else
-          $(eval MODULES_ADDED_WALL := $(MODULES_ADDED_WALL) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
+          $(eval MODULES_WARNINGS_ALLOWED := $(MODULES_WARNINGS_ALLOWED) $(LOCAL_MODULE_MAKEFILE):$(LOCAL_MODULE))
           my_cflags := -Wall $(my_cflags)
         endif
       endif
diff --git a/core/board_config.mk b/core/board_config.mk
index 72a8044..70c91a8 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -174,6 +174,10 @@
 
 
 _build_broken_var_list := \
+  BUILD_BROKEN_CLANG_PROPERTY \
+  BUILD_BROKEN_CLANG_ASFLAGS \
+  BUILD_BROKEN_CLANG_CFLAGS \
+  BUILD_BROKEN_DEPFILE \
   BUILD_BROKEN_DUP_RULES \
   BUILD_BROKEN_DUP_SYSPROP \
   BUILD_BROKEN_ELF_PREBUILT_PRODUCT_COPY_FILES \
@@ -234,10 +238,7 @@
   .KATI_READONLY := TARGET_DEVICE_DIR
 endif
 
-# TODO(colefaust) change this if to RBC_PRODUCT_CONFIG when
-# the board configuration is known to work on everything
-# the product config works on.
-ifndef RBC_BOARD_CONFIG
+ifndef RBC_PRODUCT_CONFIG
 include $(board_config_mk)
 else
   $(shell mkdir -p $(OUT_DIR)/rbc)
@@ -254,7 +255,7 @@
   endif
 
   $(shell build/soong/scripts/update_out $(OUT_DIR)/rbc/rbc_board_config_results.mk \
-    $(OUT_DIR)/rbcrun RBC_OUT="make,global" $(OUT_DIR)/rbc/boardlauncher.rbc)
+    $(OUT_DIR)/rbcrun RBC_OUT="make" $(OUT_DIR)/rbc/boardlauncher.rbc)
   ifneq ($(.SHELLSTATUS),0)
     $(error board configuration runner failed: $(.SHELLSTATUS))
   endif
@@ -285,6 +286,8 @@
   $(if $(filter-out true false,$($(var))), \
     $(error Valid values of $(var) are "true", "false", and "". Not "$($(var))")))
 
+include $(BUILD_SYSTEM)/board_config_wifi.mk
+
 # Default *_CPU_VARIANT_RUNTIME to CPU_VARIANT if unspecified.
 TARGET_CPU_VARIANT_RUNTIME := $(or $(TARGET_CPU_VARIANT_RUNTIME),$(TARGET_CPU_VARIANT))
 TARGET_2ND_CPU_VARIANT_RUNTIME := $(or $(TARGET_2ND_CPU_VARIANT_RUNTIME),$(TARGET_2ND_CPU_VARIANT))
@@ -402,12 +405,6 @@
 endef
 
 ###########################################
-# Now we can substitute with the real value of TARGET_COPY_OUT_RAMDISK
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-TARGET_COPY_OUT_RAMDISK := $(TARGET_COPY_OUT_ROOT)
-endif
-
-###########################################
 # Configure whether we're building the system image
 BUILDING_SYSTEM_IMAGE := true
 ifeq ($(PRODUCT_BUILD_SYSTEM_IMAGE),)
@@ -556,15 +553,8 @@
 
 # Are we building a debug vendor_boot image
 BUILDING_DEBUG_VENDOR_BOOT_IMAGE :=
-# Can't build vendor_boot-debug.img if BOARD_BUILD_SYSTEM_ROOT_IMAGE is true,
-# because building debug vendor_boot image requires a ramdisk.
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-  ifeq ($(PRODUCT_BUILD_DEBUG_VENDOR_BOOT_IMAGE),true)
-    $(warning PRODUCT_BUILD_DEBUG_VENDOR_BOOT_IMAGE is true, but so is BOARD_BUILD_SYSTEM_ROOT_IMAGE. \
-      Skip building the debug vendor_boot image.)
-  endif
 # Can't build vendor_boot-debug.img if we're not building a ramdisk.
-else ifndef BUILDING_RAMDISK_IMAGE
+ifndef BUILDING_RAMDISK_IMAGE
   ifeq ($(PRODUCT_BUILD_DEBUG_VENDOR_BOOT_IMAGE),true)
     $(warning PRODUCT_BUILD_DEBUG_VENDOR_BOOT_IMAGE is true, but we're not building a ramdisk image. \
       Skip building the debug vendor_boot image.)
@@ -601,15 +591,8 @@
 
 # Are we building a debug boot image
 BUILDING_DEBUG_BOOT_IMAGE :=
-# Can't build boot-debug.img if BOARD_BUILD_SYSTEM_ROOT_IMAGE is true,
-# because building debug boot image requires a ramdisk.
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-  ifeq ($(PRODUCT_BUILD_DEBUG_BOOT_IMAGE),true)
-    $(warning PRODUCT_BUILD_DEBUG_BOOT_IMAGE is true, but so is BOARD_BUILD_SYSTEM_ROOT_IMAGE. \
-      Skip building the debug boot image.)
-  endif
 # Can't build boot-debug.img if we're not building a ramdisk.
-else ifndef BUILDING_RAMDISK_IMAGE
+ifndef BUILDING_RAMDISK_IMAGE
   ifeq ($(PRODUCT_BUILD_DEBUG_BOOT_IMAGE),true)
     $(warning PRODUCT_BUILD_DEBUG_BOOT_IMAGE is true, but we're not building a ramdisk image. \
       Skip building the debug boot image.)
@@ -930,9 +913,6 @@
 .KATI_READONLY := BUILDING_SYSTEM_DLKM_IMAGE
 
 BOARD_USES_PVMFWIMAGE :=
-ifdef BOARD_PREBUILT_PVMFWIMAGE
-  BOARD_USES_PVMFWIMAGE := true
-endif
 ifeq ($(PRODUCT_BUILD_PVMFW_IMAGE),true)
   BOARD_USES_PVMFWIMAGE := true
 endif
@@ -942,9 +922,6 @@
 ifeq ($(PRODUCT_BUILD_PVMFW_IMAGE),true)
   BUILDING_PVMFW_IMAGE := true
 endif
-ifdef BOARD_PREBUILT_PVMFWIMAGE
-  BUILDING_PVMFW_IMAGE :=
-endif
 .KATI_READONLY := BUILDING_PVMFW_IMAGE
 
 ###########################################
diff --git a/core/board_config_wifi.mk b/core/board_config_wifi.mk
new file mode 100644
index 0000000..ddeb0d7
--- /dev/null
+++ b/core/board_config_wifi.mk
@@ -0,0 +1,77 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# ###############################################################
+# This file adds WIFI variables into soong config namespace (`wifi`)
+# ###############################################################
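+#
+# Downstream Android.bp modules can branch on these values, for example via
+# soong_config_module_type definitions that read from the `wifi` namespace.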
+
+ifdef BOARD_WLAN_DEVICE
+    $(call soong_config_set,wifi,board_wlan_device,$(BOARD_WLAN_DEVICE))
+endif
+ifdef WIFI_DRIVER_MODULE_PATH
+    $(call soong_config_set,wifi,driver_module_path,$(WIFI_DRIVER_MODULE_PATH))
+endif
+ifdef WIFI_DRIVER_MODULE_ARG
+    $(call soong_config_set,wifi,driver_module_arg,$(WIFI_DRIVER_MODULE_ARG))
+endif
+ifdef WIFI_DRIVER_MODULE_NAME
+    $(call soong_config_set,wifi,driver_module_name,$(WIFI_DRIVER_MODULE_NAME))
+endif
+ifdef WIFI_DRIVER_FW_PATH_STA
+    $(call soong_config_set,wifi,driver_fw_path_sta,$(WIFI_DRIVER_FW_PATH_STA))
+endif
+ifdef WIFI_DRIVER_FW_PATH_AP
+    $(call soong_config_set,wifi,driver_fw_path_ap,$(WIFI_DRIVER_FW_PATH_AP))
+endif
+ifdef WIFI_DRIVER_FW_PATH_P2P
+    $(call soong_config_set,wifi,driver_fw_path_p2p,$(WIFI_DRIVER_FW_PATH_P2P))
+endif
+ifdef WIFI_DRIVER_FW_PATH_PARAM
+    $(call soong_config_set,wifi,driver_fw_path_param,$(WIFI_DRIVER_FW_PATH_PARAM))
+endif
+ifdef WIFI_DRIVER_STATE_CTRL_PARAM
+    $(call soong_config_set,wifi,driver_state_ctrl_param,$(WIFI_DRIVER_STATE_CTRL_PARAM))
+endif
+ifdef WIFI_DRIVER_STATE_ON
+    $(call soong_config_set,wifi,driver_state_on,$(WIFI_DRIVER_STATE_ON))
+endif
+ifdef WIFI_DRIVER_STATE_OFF
+    $(call soong_config_set,wifi,driver_state_off,$(WIFI_DRIVER_STATE_OFF))
+endif
+ifdef WIFI_MULTIPLE_VENDOR_HALS
+    $(call soong_config_set,wifi,multiple_vendor_hals,$(WIFI_MULTIPLE_VENDOR_HALS))
+endif
+ifneq ($(wildcard vendor/google/libraries/GoogleWifiConfigLib),)
+    $(call soong_config_set,wifi,google_wifi_config_lib,true)
+endif
+ifdef WIFI_HAL_INTERFACE_COMBINATIONS
+    $(call soong_config_set,wifi,hal_interface_combinations,$(WIFI_HAL_INTERFACE_COMBINATIONS))
+endif
+ifdef WIFI_HIDL_FEATURE_AWARE
+    $(call soong_config_set,wifi,hidl_feature_aware,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DUAL_INTERFACE
+    $(call soong_config_set,wifi,hidl_feature_dual_interface,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DISABLE_AP
+    $(call soong_config_set,wifi,hidl_feature_disable_ap,true)
+endif
+ifdef WIFI_HIDL_FEATURE_DISABLE_AP_MAC_RANDOMIZATION
+    $(call soong_config_set,wifi,hidl_feature_disable_ap_mac_randomization,true)
+endif
+ifdef WIFI_AVOID_IFACE_RESET_MAC_CHANGE
+    $(call soong_config_set,wifi,avoid_iface_reset_mac_change,true)
+endif
\ No newline at end of file
diff --git a/core/cc_prebuilt_internal.mk b/core/cc_prebuilt_internal.mk
index e8e01d8..2de4115 100644
--- a/core/cc_prebuilt_internal.mk
+++ b/core/cc_prebuilt_internal.mk
@@ -139,6 +139,27 @@
 # my_shared_libraries).
 include $(BUILD_SYSTEM)/cxx_stl_setup.mk
 
+# When compiling against an API-imported module, use the API import stub libraries.
+apiimport_postfix := .apiimport
+
+ifneq ($(LOCAL_USE_VNDK),)
+  ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
+    apiimport_postfix := .apiimport.product
+  else
+    apiimport_postfix := .apiimport.vendor
+  endif
+endif
+
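+# For example (hypothetical module name): if "libfoo" appears in
+# API_IMPORTED_SHARED_LIBRARIES and LOCAL_USE_VNDK is set, the loops below
+# rewrite a dependency on "libfoo" into "libfoo.apiimport.vendor".
+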
+ifdef my_shared_libraries
+my_shared_libraries := $(foreach l,$(my_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+endif #my_shared_libraries
+
+ifdef my_system_shared_libraries
+my_system_shared_libraries := $(foreach l,$(my_system_shared_libraries), \
+ $(if $(filter $(l), $(API_IMPORTED_SHARED_LIBRARIES)), $(l)$(apiimport_postfix), $(l)))
+endif #my_system_shared_libraries
+
 ifdef my_shared_libraries
 ifdef LOCAL_USE_VNDK
   ifeq ($(LOCAL_USE_VNDK_PRODUCT),true)
diff --git a/core/clang/TARGET_riscv64.mk b/core/clang/TARGET_riscv64.mk
new file mode 100644
index 0000000..cfb5c7d
--- /dev/null
+++ b/core/clang/TARGET_riscv64.mk
@@ -0,0 +1,10 @@
+RS_TRIPLE := renderscript64-linux-android
+RS_TRIPLE_CFLAGS := -D__riscv64__
+RS_COMPAT_TRIPLE := riscv64-linux-android
+
+TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-riscv64-android.a
+TARGET_LIBCRT_BUILTINS := $(LLVM_RTLIB_PATH)/libclang_rt.builtins-riscv64-android.a
+
+# Address sanitizer clang config
+ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
+ADDRESS_SANITIZER_LINKER_FILE := /system/bin/bootstrap/linker_asan64
diff --git a/core/cleanspec.mk b/core/cleanspec.mk
index af28954..0232a17 100644
--- a/core/cleanspec.mk
+++ b/core/cleanspec.mk
@@ -58,6 +58,12 @@
 #$(call add-clean-step, rm -rf $(OUT_DIR)/target/common/obj/JAVA_LIBRARIES/core_intermediates)
 #$(call add-clean-step, find $(OUT_DIR) -type f -name "IGTalkSession*" -print0 | xargs -0 rm -f)
 #$(call add-clean-step, rm -rf $(PRODUCT_OUT)/data/*)
+$(call add-clean-step, rm -rf $(OUT_DIR)/obj/ETC/build_manifest-vendor_intermediates)
+$(call add-clean-step, rm -rf $(OUT_DIR)/obj/ETC/build_manifest-odm_intermediates)
+$(call add-clean-step, rm -rf $(OUT_DIR)/obj/ETC/build_manifest-product_intermediates)
+$(call add-clean-step, rm -rf $(TARGET_OUT_VENDOR)/etc/security/fsverity)
+$(call add-clean-step, rm -rf $(TARGET_OUT_ODM)/etc/security/fsverity)
+$(call add-clean-step, rm -rf $(TARGET_OUT_PRODUCT)/etc/security/fsverity)
 
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index b5b371c..e325760 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -134,6 +134,7 @@
 LOCAL_IS_HOST_MODULE:=
 LOCAL_IS_RUNTIME_RESOURCE_OVERLAY:=
 LOCAL_IS_UNIT_TEST:=
+LOCAL_TEST_OPTIONS_TAGS:=
 LOCAL_JACK_CLASSPATH:=
 LOCAL_JACK_COVERAGE_EXCLUDE_FILTER:=
 LOCAL_JACK_COVERAGE_INCLUDE_FILTER:=
@@ -152,7 +153,6 @@
 LOCAL_JAR_PROCESSOR_ARGS:=
 LOCAL_JAVACFLAGS:=
 LOCAL_JAVA_LANGUAGE_VERSION:=
-LOCAL_JAVA_LAYERS_FILE:=
 LOCAL_JAVA_LIBRARIES:=
 LOCAL_JAVA_RESOURCE_DIRS:=
 LOCAL_JAVA_RESOURCE_FILES:=
diff --git a/core/combo/TARGET_linux-riscv64.mk b/core/combo/TARGET_linux-riscv64.mk
new file mode 100644
index 0000000..8f8fd3c
--- /dev/null
+++ b/core/combo/TARGET_linux-riscv64.mk
@@ -0,0 +1,40 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Configuration for Linux on riscv64 as a target.
+# Included by combo/select.mk
+
+# Provide a default variant.
+ifeq ($(strip $(TARGET_ARCH_VARIANT)),)
+TARGET_ARCH_VARIANT := riscv64
+endif
+
+# Include the arch-variant-specific configuration file.
+# Its role is to define various ARCH_RISCV64_HAVE_XXX feature macros,
+# plus initial values for TARGET_GLOBAL_CFLAGS.
+#
+TARGET_ARCH_SPECIFIC_MAKEFILE := $(BUILD_COMBOS)/arch/$(TARGET_ARCH)/$(TARGET_ARCH_VARIANT).mk
+ifeq ($(strip $(wildcard $(TARGET_ARCH_SPECIFIC_MAKEFILE))),)
+$(error Unknown $(TARGET_ARCH) architecture version: $(TARGET_ARCH_VARIANT))
+endif
+
+include $(TARGET_ARCH_SPECIFIC_MAKEFILE)
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
+
+TARGET_LINKER := /system/bin/linker64
diff --git a/core/combo/arch/riscv64/riscv64.mk b/core/combo/arch/riscv64/riscv64.mk
new file mode 100644
index 0000000..0505541
--- /dev/null
+++ b/core/combo/arch/riscv64/riscv64.mk
@@ -0,0 +1,2 @@
+# This file contains feature macro definitions specific to the
+# base 'riscv64' platform ABI.
diff --git a/core/config.mk b/core/config.mk
index e2bdcbd..631ba34 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -155,12 +155,18 @@
 $(KATI_obsolete_var COVERAGE_EXCLUDE_PATHS,Use NATIVE_COVERAGE_EXCLUDE_PATHS instead)
 $(KATI_obsolete_var BOARD_VNDK_RUNTIME_DISABLE,VNDK-Lite is no longer supported)
 $(KATI_obsolete_var LOCAL_SANITIZE_BLACKLIST,Use LOCAL_SANITIZE_BLOCKLIST instead)
-$(KATI_deprecated_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead)
-$(KATI_deprecated_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead)
+$(KATI_obsolete_var BOARD_PLAT_PUBLIC_SEPOLICY_DIR,Use SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS instead)
+$(KATI_obsolete_var BOARD_PLAT_PRIVATE_SEPOLICY_DIR,Use SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS instead)
 $(KATI_obsolete_var TARGET_NO_VENDOR_BOOT,Use PRODUCT_BUILD_VENDOR_BOOT_IMAGE instead)
 $(KATI_obsolete_var PRODUCT_CHECK_ELF_FILES,Use BUILD_BROKEN_PREBUILT_ELF_FILES instead)
 $(KATI_obsolete_var ALL_GENERATED_SOURCES,ALL_GENERATED_SOURCES is no longer used)
 $(KATI_obsolete_var ALL_ORIGINAL_DYNAMIC_BINARIES,ALL_ORIGINAL_DYNAMIC_BINARIES is no longer used)
+$(KATI_obsolete_var PRODUCT_SUPPORTS_VERITY,VB 1.0 and related variables are no longer supported)
+$(KATI_obsolete_var PRODUCT_SUPPORTS_VERITY_FEC,VB 1.0 and related variables are no longer supported)
+$(KATI_obsolete_var PRODUCT_SUPPORTS_BOOT_SIGNER,VB 1.0 and related variables are no longer supported)
+$(KATI_obsolete_var PRODUCT_VERITY_SIGNING_KEY,VB 1.0 and related variables are no longer supported)
+$(KATI_obsolete_var BOARD_PREBUILT_PVMFWIMAGE,pvmfw.bin is now built in AOSP and custom versions are no longer supported)
+$(KATI_obsolete_var BOARD_BUILD_SYSTEM_ROOT_IMAGE)
 
 # Used to force goals to build.  Only use for conditionally defined goals.
 .PHONY: FORCE
@@ -226,8 +232,6 @@
 BUILD_FUZZ_TEST :=$= $(BUILD_SYSTEM)/fuzz_test.mk
 
 BUILD_NOTICE_FILE :=$= $(BUILD_SYSTEM)/notice_files.mk
-BUILD_HOST_DALVIK_JAVA_LIBRARY :=$= $(BUILD_SYSTEM)/host_dalvik_java_library.mk
-BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY :=$= $(BUILD_SYSTEM)/host_dalvik_static_java_library.mk
 
 include $(BUILD_SYSTEM)/deprecation.mk
 
@@ -602,15 +606,15 @@
 MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg_mke2fs
 MKE2FS_CONF := system/extras/ext4_utils/mke2fs.conf
 MKEROFS := $(HOST_OUT_EXECUTABLES)/mkfs.erofs
-MKSQUASHFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mksquashfsimage.sh
-MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh
+MKSQUASHFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mksquashfsimage
+MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg
 SIMG2IMG := $(HOST_OUT_EXECUTABLES)/simg2img$(HOST_EXECUTABLE_SUFFIX)
 E2FSCK := $(HOST_OUT_EXECUTABLES)/e2fsck$(HOST_EXECUTABLE_SUFFIX)
 TUNE2FS := $(HOST_OUT_EXECUTABLES)/tune2fs$(HOST_EXECUTABLE_SUFFIX)
 JARJAR := $(HOST_OUT_JAVA_LIBRARIES)/jarjar.jar
 DATA_BINDING_COMPILER := $(HOST_OUT_JAVA_LIBRARIES)/databinding-compiler.jar
 FAT16COPY := build/make/tools/fat16copy.py
-CHECK_ELF_FILE := build/make/tools/check_elf_file.py
+CHECK_ELF_FILE := $(HOST_OUT_EXECUTABLES)/check_elf_file$(HOST_EXECUTABLE_SUFFIX)
 LPMAKE := $(HOST_OUT_EXECUTABLES)/lpmake$(HOST_EXECUTABLE_SUFFIX)
 ADD_IMG_TO_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/add_img_to_target_files$(HOST_EXECUTABLE_SUFFIX)
 BUILD_IMAGE := $(HOST_OUT_EXECUTABLES)/build_image$(HOST_EXECUTABLE_SUFFIX)
@@ -620,6 +624,7 @@
 OTA_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/ota_from_target_files$(HOST_EXECUTABLE_SUFFIX)
 SPARSE_IMG := $(HOST_OUT_EXECUTABLES)/sparse_img$(HOST_EXECUTABLE_SUFFIX)
 CHECK_PARTITION_SIZES := $(HOST_OUT_EXECUTABLES)/check_partition_sizes$(HOST_EXECUTABLE_SUFFIX)
+SYMBOLS_MAP := $(HOST_OUT_EXECUTABLES)/symbols_map
 
 PROGUARD_HOME := external/proguard
 PROGUARD := $(PROGUARD_HOME)/bin/proguard.sh
@@ -630,10 +635,8 @@
 VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
 BUILD_VERITY_METADATA := $(HOST_OUT_EXECUTABLES)/build_verity_metadata
 BUILD_VERITY_TREE := $(HOST_OUT_EXECUTABLES)/build_verity_tree
-BOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/boot_signer
 FUTILITY := $(HOST_OUT_EXECUTABLES)/futility-host
 VBOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/vboot_signer
-FEC := $(HOST_OUT_EXECUTABLES)/fec
 
 DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
@@ -712,27 +715,11 @@
   BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED ?= true
 endif
 
-# If PRODUCT_USE_VNDK is true and BOARD_VNDK_VERSION is not defined yet,
-# BOARD_VNDK_VERSION will be set to "current" as default.
-# PRODUCT_USE_VNDK will be true in Android-P or later launching devices.
-PRODUCT_USE_VNDK := false
-ifneq ($(PRODUCT_USE_VNDK_OVERRIDE),)
-  PRODUCT_USE_VNDK := $(PRODUCT_USE_VNDK_OVERRIDE)
-else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
-  # No shipping level defined
-else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),27),true)
-  PRODUCT_USE_VNDK := $(PRODUCT_FULL_TREBLE)
+# Starting in Android U, non-VNDK devices are not supported.
+ifndef BOARD_VNDK_VERSION
+BOARD_VNDK_VERSION := current
 endif
 
-ifeq ($(PRODUCT_USE_VNDK),true)
-  ifndef BOARD_VNDK_VERSION
-    BOARD_VNDK_VERSION := current
-  endif
-endif
-
-$(KATI_obsolete_var PRODUCT_USE_VNDK,Use BOARD_VNDK_VERSION instead)
-$(KATI_obsolete_var PRODUCT_USE_VNDK_OVERRIDE,Use BOARD_VNDK_VERSION instead)
-
 ifdef PRODUCT_PRODUCT_VNDK_VERSION
   ifndef BOARD_VNDK_VERSION
     # VNDK for product partition is not available unless BOARD_VNDK_VERSION
@@ -804,6 +791,7 @@
 else
   MAINLINE_SEPOLICY_DEV_CERTIFICATES := $(dir $(DEFAULT_SYSTEM_DEV_CERTIFICATE))
 endif
+.KATI_READONLY := MAINLINE_SEPOLICY_DEV_CERTIFICATES
 
 BUILD_NUMBER_FROM_FILE := $$(cat $(SOONG_OUT_DIR)/build_number.txt)
 BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
@@ -820,7 +808,7 @@
 # is made which breaks compatibility with the previous platform sepolicy version,
 # not just on every increase in PLATFORM_SDK_VERSION.  The minor version should
 # be reset to 0 on every bump of the PLATFORM_SDK_VERSION.
-sepolicy_major_vers := 32
+sepolicy_major_vers := 33
 sepolicy_minor_vers := 0
 
 ifneq ($(sepolicy_major_vers), $(PLATFORM_SDK_VERSION))
@@ -860,6 +848,7 @@
     30.0 \
     31.0 \
     32.0 \
+    33.0 \
 
 .KATI_READONLY := \
     PLATFORM_SEPOLICY_COMPAT_VERSIONS \
@@ -880,9 +869,6 @@
 endif
 
 ifeq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
-    ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-        $(error BOARD_BUILD_SYSTEM_ROOT_IMAGE cannot be true for devices with dynamic partitions)
-    endif
     ifneq ($(PRODUCT_USE_DYNAMIC_PARTITION_SIZE),true)
         $(error PRODUCT_USE_DYNAMIC_PARTITION_SIZE must be true for devices with dynamic partitions)
     endif
@@ -973,16 +959,6 @@
     $(eval .KATI_READONLY := BOARD_$(group)_PARTITION_LIST) \
 )
 
-# BOARD_*_PARTITION_LIST: a list of the following tokens
-valid_super_partition_list := system vendor product system_ext odm vendor_dlkm odm_dlkm system_dlkm
-$(foreach group,$(call to-upper,$(BOARD_SUPER_PARTITION_GROUPS)), \
-    $(if $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)), \
-        $(error BOARD_$(group)_PARTITION_LIST contains invalid partition name \
-            $(filter-out $(valid_super_partition_list),$(BOARD_$(group)_PARTITION_LIST)). \
-            Valid names are $(valid_super_partition_list))))
-valid_super_partition_list :=
-
-
 # Define BOARD_SUPER_PARTITION_PARTITION_LIST, the sum of all BOARD_*_PARTITION_LIST
 ifdef BOARD_SUPER_PARTITION_PARTITION_LIST
 $(error BOARD_SUPER_PARTITION_PARTITION_LIST should not be defined, but computed from \
@@ -1232,10 +1208,27 @@
 endif
 .KATI_READONLY := GOMA_POOL RBE_POOL GOMA_OR_RBE_POOL
 
+JAVAC_NINJA_POOL :=
+R8_NINJA_POOL :=
+D8_NINJA_POOL :=
+
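+# For example, with USE_RBE=true and RBE_JAVAC set in the environment, javac
+# actions are throttled by the shared RBE ninja pool instead of running
+# unpooled.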
+ifneq ($(filter-out false,$(USE_RBE)),)
+  ifdef RBE_JAVAC
+    JAVAC_NINJA_POOL := $(RBE_POOL)
+  endif
+  ifdef RBE_R8
+    R8_NINJA_POOL := $(RBE_POOL)
+  endif
+  ifdef RBE_D8
+    D8_NINJA_POOL := $(RBE_POOL)
+  endif
+endif
+
+.KATI_READONLY := JAVAC_NINJA_POOL R8_NINJA_POOL D8_NINJA_POOL
+
 # These goals don't need to collect and include Android.mks/CleanSpec.mks
 # in the source tree.
-dont_bother_goals := out \
-    product-graph dump-products
+dont_bother_goals := out product-graph
 
 # Make ANDROID Soong config variables visible to Android.mk files, for
 # consistency with those defined in BoardConfig.mk files.
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index a0ff119..35c632c 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -161,17 +161,19 @@
   my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
 endif
 
-# Also disable CFI if ASAN is enabled.
+# Also disable CFI and MTE if ASAN is enabled.
 ifneq ($(filter address,$(my_sanitize)),)
   my_sanitize := $(filter-out cfi,$(my_sanitize))
+  my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
+  my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
   my_sanitize_diag := $(filter-out cfi,$(my_sanitize_diag))
 endif
 
 # Disable memtag for host targets. Host executables in AndroidMk files are
 # deprecated, but some partners still have them floating around.
 ifdef LOCAL_IS_HOST_MODULE
-  my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
-  my_sanitize_diag := $(filter-out memtag_heap,$(my_sanitize_diag))
+  my_sanitize := $(filter-out memtag_heap memtag_stack,$(my_sanitize))
+  my_sanitize_diag := $(filter-out memtag_heap memtag_stack,$(my_sanitize_diag))
 endif
 
 # Disable sanitizers which need the UBSan runtime for host targets.
@@ -205,10 +207,13 @@
 ifneq ($(filter arm x86 x86_64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)),)
   my_sanitize := $(filter-out hwaddress,$(my_sanitize))
   my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
+  my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
 endif
 
 ifneq ($(filter hwaddress,$(my_sanitize)),)
   my_sanitize := $(filter-out address,$(my_sanitize))
+  my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
+  my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
   my_sanitize := $(filter-out thread,$(my_sanitize))
   my_sanitize := $(filter-out cfi,$(my_sanitize))
 endif
@@ -224,21 +229,27 @@
   endif
 endif
 
-ifneq ($(filter memtag_heap,$(my_sanitize)),)
-  # Add memtag ELF note.
-  ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
-    ifneq ($(filter memtag_heap,$(my_sanitize_diag)),)
-      my_whole_static_libraries += note_memtag_heap_sync
-    else
-      my_whole_static_libraries += note_memtag_heap_async
-    endif
+ifneq ($(filter memtag_heap memtag_stack,$(my_sanitize)),)
+  ifneq ($(filter memtag_heap,$(my_sanitize_diag)),)
+    my_cflags += -fsanitize-memtag-mode=sync
+    my_sanitize_diag := $(filter-out memtag_heap,$(my_sanitize_diag))
+  else
+    my_cflags += -fsanitize-memtag-mode=async
   endif
-  # This is all that memtag_heap does - it is not an actual -fsanitize argument.
-  # Remove it from the list.
+endif
+
+ifneq ($(filter memtag_heap,$(my_sanitize)),)
+  my_cflags += -fsanitize=memtag-heap
   my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
 endif
 
-my_sanitize_diag := $(filter-out memtag_heap,$(my_sanitize_diag))
+ifneq ($(filter memtag_stack,$(my_sanitize)),)
+  my_cflags += -fsanitize=memtag-stack
+  my_cflags += -march=armv8a+memtag
+  my_ldflags += -march=armv8a+memtag
+  my_asflags += -march=armv8a+memtag
+  my_sanitize := $(filter-out memtag_stack,$(my_sanitize))
+endif
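+# For example, memtag_heap with diagnostics enabled becomes
+# `-fsanitize=memtag-heap -fsanitize-memtag-mode=sync` in my_cflags.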
 
 # TSAN is not supported on 32-bit architectures. For non-multilib cases, make
 # its use an error. For multilib cases, don't use it for the 32-bit case.
diff --git a/core/definitions.mk b/core/definitions.mk
index 2711f57..afa7f7b 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -41,6 +41,9 @@
 ALL_NON_MODULES:=
 NON_MODULES_WITHOUT_LICENSE_METADATA:=
 
+# List of copied targets that need license metadata copied.
+ALL_COPIED_TARGETS:=
+
 # Full paths to targets that should be added to the "make droid"
 # set of installed targets.
 ALL_DEFAULT_INSTALLED_MODULES:=
@@ -567,29 +570,48 @@
 ## Target directory for license metadata files.
 ###########################################################
 define license-metadata-dir
-$(call generated-sources-dir-for,META,lic,)
+$(call generated-sources-dir-for,META,lic,$(filter-out $(PRODUCT_OUT)%,$(1)))
 endef
 
+TARGETS_MISSING_LICENSE_METADATA:=
+
 ###########################################################
 # License metadata targets corresponding to targets in $(1)
 ###########################################################
 define corresponding-license-metadata
-$(strip $(foreach target, $(sort $(1)), \
+$(strip $(filter-out 0p,$(foreach target, $(sort $(1)), \
   $(if $(strip $(ALL_MODULES.$(target).META_LIC)), \
     $(ALL_MODULES.$(target).META_LIC), \
     $(if $(strip $(ALL_TARGETS.$(target).META_LIC)), \
       $(ALL_TARGETS.$(target).META_LIC), \
-      $(call append-path,$(call license-metadata-dir),$(patsubst $(OUT_DIR)%,out%,$(target).meta_lic))))))
+      $(eval TARGETS_MISSING_LICENSE_METADATA += $(target)) \
+    ) \
+  ) \
+)))
+endef
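+# 0p entries (non-copyrightable) are filtered out of the result, and any
+# target with no recorded metadata is collected in
+# TARGETS_MISSING_LICENSE_METADATA rather than given a synthesized path.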
+
+###########################################################
+## Record a target $(1) copied from other target(s) $(2) that will need
+## license metadata.
+###########################################################
+define declare-copy-target-license-metadata
+$(strip $(if $(filter $(OUT_DIR)%,$(2)),$(eval _dir:=$(call license-metadata-dir,$(1)))\
+  $(eval _tgt:=$(strip $(1)))\
+  $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(_tgt).meta_lic)))\
+  $(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2)))\
+  $(eval ALL_COPIED_TARGETS += $(_tgt)),\
+  $(eval ALL_TARGETS.$(1).META_LIC:=$(module_license_metadata))))
 endef
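+
+# Example (hypothetical paths): for a file copied out of the build tree,
+#   $(call declare-copy-target-license-metadata,$(TARGET_OUT)/etc/foo.conf,$(OUT_DIR)/obj/ETC/foo.conf_intermediates/foo.conf)
+# records the copy so copied-target-license-metadata-rule can later point the
+# destination at its source's metadata; sources outside $(OUT_DIR) inherit
+# $(module_license_metadata) directly.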
 
 ###########################################################
 ## License metadata build rule for my_register_name $(1)
 ###########################################################
 define license-metadata-rule
-$(foreach meta_lic, $(subst //,/,$(ALL_MODULES.$(1).DELAYED_META_LIC)),$(call _license-metadata-rule,$(1),$(meta_lic)))
-$(call notice-rule,$(1))
+$(foreach meta_lic, $(ALL_MODULES.$(1).DELAYED_META_LIC),$(call _license-metadata-rule,$(1),$(meta_lic)))
 endef
 
+$(KATI_obsolete_var notice-rule, This function has been removed)
+
 define _license-metadata-rule
 $(strip $(eval _srcs := $(strip $(foreach d,$(ALL_MODULES.$(1).NOTICE_DEPS),$(if $(strip $(ALL_MODULES.$(call word-colon,1,$(d)).INSTALLED)), $(ALL_MODULES.$(call word-colon,1,$(d)).INSTALLED),$(if $(strip $(ALL_MODULES.$(call word-colon,1,$(d)).BUILT)), $(ALL_MODULES.$(call word-colon,1,$(d)).BUILT), $(call word-colon,1,$d)))))))
 $(strip $(eval _deps := $(sort $(filter-out $(2)%,\
@@ -619,69 +641,49 @@
 $(2): PRIVATE_IS_CONTAINER := $(ALL_MODULES.$(1).IS_CONTAINER)
 $(2): PRIVATE_PACKAGE_NAME := $(strip $(ALL_MODULES.$(1).LICENSE_PACKAGE_NAME))
 $(2): PRIVATE_INSTALL_MAP := $(_map)
+$(2): PRIVATE_MODULE_NAME := $(1)
 $(2): PRIVATE_MODULE_TYPE := $(ALL_MODULES.$(1).MODULE_TYPE)
 $(2): PRIVATE_MODULE_CLASS := $(ALL_MODULES.$(1).MODULE_CLASS)
 $(2): PRIVATE_INSTALL_MAP := $(_map)
+$(2): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,PACKAGING,notice)/$(2)/arguments
 $(2): $(BUILD_LICENSE_METADATA)
 $(2) : $(foreach d,$(_deps),$(call word-colon,1,$(d))) $(foreach n,$(_notices),$(call word-colon,1,$(n)) )
 	rm -f $$@
 	mkdir -p $$(dir $$@)
+	mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+	$$(call dump-words-to-file,\
+	    $$(addprefix -mn ,$$(PRIVATE_MODULE_NAME))\
+	    $$(addprefix -mt ,$$(PRIVATE_MODULE_TYPE))\
+	    $$(addprefix -mc ,$$(PRIVATE_MODULE_CLASS))\
+	    $$(addprefix -k ,$$(PRIVATE_KINDS))\
+	    $$(addprefix -c ,$$(PRIVATE_CONDITIONS))\
+	    $$(addprefix -n ,$$(PRIVATE_NOTICES))\
+	    $$(addprefix -d ,$$(PRIVATE_NOTICE_DEPS))\
+	    $$(addprefix -s ,$$(PRIVATE_SOURCES))\
+	    $$(addprefix -m ,$$(PRIVATE_INSTALL_MAP))\
+	    $$(addprefix -t ,$$(PRIVATE_TARGETS))\
+	    $$(addprefix -i ,$$(PRIVATE_INSTALLED))\
+	    $$(addprefix -r ,$$(PRIVATE_PATH)),\
+	    $$(PRIVATE_ARGUMENT_FILE))
 	OUT_DIR=$(OUT_DIR) $(BUILD_LICENSE_METADATA) \
-	  $$(addprefix -mt ,$$(PRIVATE_MODULE_TYPE)) \
-	  $$(addprefix -mc ,$$(PRIVATE_MODULE_CLASS)) \
-	  $$(addprefix -k ,$$(PRIVATE_KINDS)) \
-	  $$(addprefix -c ,$$(PRIVATE_CONDITIONS)) \
-	  $$(addprefix -n ,$$(PRIVATE_NOTICES)) \
-	  $$(addprefix -d ,$$(PRIVATE_NOTICE_DEPS)) \
-	  $$(addprefix -s ,$$(PRIVATE_SOURCES)) \
-	  $$(addprefix -m ,$$(PRIVATE_INSTALL_MAP)) \
-	  $$(addprefix -t ,$$(PRIVATE_TARGETS)) \
-	  $$(addprefix -i ,$$(PRIVATE_INSTALLED)) \
 	  $$(if $$(PRIVATE_IS_CONTAINER),-is_container) \
 	  -p '$$(PRIVATE_PACKAGE_NAME)' \
-	  $$(addprefix -r ,$$(PRIVATE_PATH)) \
+	  @$$(PRIVATE_ARGUMENT_FILE) \
 	  -o $$@
 endef
 
-define notice-rule
-$(strip $(eval _mifs := $(sort $(ALL_MODULES.$(1).MODULE_INSTALLED_FILENAMES))))
-$(strip $(eval _infs := $(sort $(ALL_MODULES.$(1).INSTALLED_NOTICE_FILE))))
-
-# Emit each installed notice file rule if it references the current module
-$(if $(_infs),$(foreach inf,$(_infs),
-$(if $(strip $(filter $(1),$(INSTALLED_NOTICE_FILES.$(inf).MODULE))),
-$(strip $(eval _mif := $(firstword $(foreach m,$(_mifs),$(if $(filter %/src/$(m).txt,$(inf)),$(m))))))
-
-$(inf): PRIVATE_INSTALLED_MODULE := $(_mif)
-$(inf) : PRIVATE_NOTICES := $(sort $(foreach n,$(_notices),$(call word-colon,1,$(n) )))
-
-$(inf): $(foreach n,$(_notices),$(call word-colon,1,$(n)) )
-	@echo Notice file: $$< -- $$@
-	mkdir -p $$(dir $$@)
-	awk 'FNR==1 && NR > 1 {print "\n"} {print}' $$(PRIVATE_NOTICES) > $$@
-
-)))
-
-endef
 
 ###########################################################
 ## License metadata build rule for non-module target $(1)
 ###########################################################
 define non-module-license-metadata-rule
-$(strip $(eval _dir := $(call license-metadata-dir)))
+$(strip $(eval _dir := $(call license-metadata-dir,$(1))))
 $(strip $(eval _tgt := $(strip $(1))))
 $(strip $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(_tgt).meta_lic))))
 $(strip $(eval _deps := $(sort $(filter-out 0p: :,$(foreach d,$(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES)),$(ALL_TARGETS.$(call word-colon,1,$(d)).META_LIC):$(call wordlist-colon,2,9999,$(d)))))))
 $(strip $(eval _notices := $(sort $(ALL_NON_MODULES.$(_tgt).NOTICES))))
 $(strip $(eval _path := $(sort $(ALL_NON_MODULES.$(_tgt).PATH))))
 $(strip $(eval _install_map := $(ALL_NON_MODULES.$(_tgt).ROOT_MAPPINGS)))
-$(strip $(eval \
-  $$(foreach d,$(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES)), \
-    $$(if $$(strip $$(ALL_TARGETS.$$(d).META_LIC)), \
-      , \
-      $$(eval NON_MODULES_WITHOUT_LICENSE_METADATA += $$(d))) \
-  )) \
-)
 
 $(_meta): PRIVATE_KINDS := $(sort $(ALL_NON_MODULES.$(_tgt).LICENSE_KINDS))
 $(_meta): PRIVATE_CONDITIONS := $(sort $(ALL_NON_MODULES.$(_tgt).LICENSE_CONDITIONS))
@@ -693,27 +695,86 @@
 $(_meta): PRIVATE_IS_CONTAINER := $(ALL_NON_MODULES.$(_tgt).IS_CONTAINER)
 $(_meta): PRIVATE_PACKAGE_NAME := $(strip $(ALL_NON_MODULES.$(_tgt).LICENSE_PACKAGE_NAME))
 $(_meta): PRIVATE_INSTALL_MAP := $(strip $(_install_map))
+$(_meta): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,PACKAGING,notice)/$(_meta)/arguments
 $(_meta): $(BUILD_LICENSE_METADATA)
 $(_meta) : $(foreach d,$(_deps),$(call word-colon,1,$(d))) $(foreach n,$(_notices),$(call word-colon,1,$(n)) )
 	rm -f $$@
 	mkdir -p $$(dir $$@)
+	mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+	$$(call dump-words-to-file,\
+	    $$(addprefix -k ,$$(PRIVATE_KINDS))\
+	    $$(addprefix -c ,$$(PRIVATE_CONDITIONS))\
+	    $$(addprefix -n ,$$(PRIVATE_NOTICES))\
+	    $$(addprefix -d ,$$(PRIVATE_NOTICE_DEPS))\
+	    $$(addprefix -s ,$$(PRIVATE_SOURCES))\
+	    $$(addprefix -m ,$$(PRIVATE_INSTALL_MAP))\
+	    $$(addprefix -t ,$$(PRIVATE_TARGETS))\
+	    $$(addprefix -r ,$$(PRIVATE_PATH)),\
+	    $$(PRIVATE_ARGUMENT_FILE))
 	OUT_DIR=$(OUT_DIR) $(BUILD_LICENSE_METADATA) \
           -mt raw -mc unknown \
-	  $$(addprefix -k ,$$(PRIVATE_KINDS)) \
-	  $$(addprefix -c ,$$(PRIVATE_CONDITIONS)) \
-	  $$(addprefix -n ,$$(PRIVATE_NOTICES)) \
-	  $$(addprefix -d ,$$(PRIVATE_NOTICE_DEPS)) \
-	  $$(addprefix -s ,$$(PRIVATE_SOURCES)) \
-	  $$(addprefix -m ,$$(PRIVATE_INSTALL_MAP)) \
-	  $$(addprefix -t ,$$(PRIVATE_TARGETS)) \
 	  $$(if $$(PRIVATE_IS_CONTAINER),-is_container) \
-	  -p '$$(PRIVATE_PACKAGE_NAME)' \
 	  $$(addprefix -r ,$$(PRIVATE_PATH)) \
+	  @$$(PRIVATE_ARGUMENT_FILE) \
 	  -o $$@
 
 endef
 
 ###########################################################
+## Record missing dependencies for non-module target $(1)
+###########################################################
+define record-missing-non-module-dependencies
+$(strip $(eval _tgt := $(strip $(1))))
+$(strip $(foreach d,$(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES)), \
+  $(if $(strip $(ALL_TARGETS.$(d).META_LIC)), \
+    , \
+    $(eval NON_MODULES_WITHOUT_LICENSE_METADATA += $(d))) \
+))
+endef
+
+###########################################################
+## License metadata build rule for copied target $(1)
+###########################################################
+define copied-target-license-metadata-rule
+$(if $(strip $(ALL_TARGETS.$(1).META_LIC)),,$(call _copied-target-license-metadata-rule,$(1)))
+endef
+
+define _copied-target-license-metadata-rule
+$(strip $(eval _dir := $(call license-metadata-dir,$(1))))
+$(strip $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(1).meta_lic))))
+$(strip $(eval ALL_TARGETS.$(1).META_LIC:=$(_meta)))
+$(strip $(eval _dep:=))
+$(strip $(foreach s,$(ALL_COPIED_TARGETS.$(1).SOURCES),\
+  $(eval _dmeta:=$(ALL_TARGETS.$(s).META_LIC))\
+  $(if $(filter 0p,$(_dmeta)),\
+    $(if $(filter-out 0p,$(_dep)),,$(eval ALL_TARGETS.$(1).META_LIC:=0p)),\
+    $(if $(_dep),\
+      $(if $(filter-out $(_dep),$(_dmeta)),$(error cannot copy target from multiple modules: $(1) from $(_dep) and $(_dmeta))),\
+      $(eval _dep:=$(_dmeta))))))
+$(strip $(if $(strip $(_dep)),,$(error cannot copy target from unknown module: $(1) from $(ALL_COPIED_TARGETS.$(1).SOURCES))))
+
+ifneq (0p,$(ALL_TARGETS.$(1).META_LIC))
+$(_meta): PRIVATE_DEST_TARGET := $(1)
+$(_meta): PRIVATE_SOURCE_TARGETS := $(ALL_COPIED_TARGETS.$(1).SOURCES)
+$(_meta): PRIVATE_SOURCE_METADATA := $(_dep)
+$(_meta): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,PACKAGING,copynotice)/$(_meta)/arguments
+$(_meta) : $(_dep) $(COPY_LICENSE_METADATA)
+	rm -f $$@
+	mkdir -p $$(dir $$@)
+	mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+	$$(call dump-words-to-file,\
+	    $$(addprefix -i ,$$(PRIVATE_DEST_TARGET))\
+	    $$(addprefix -s ,$$(PRIVATE_SOURCE_TARGETS))\
+	    $$(addprefix -d ,$$(PRIVATE_SOURCE_METADATA)),\
+	    $$(PRIVATE_ARGUMENT_FILE))
+	OUT_DIR=$(OUT_DIR) $(COPY_LICENSE_METADATA) \
+	  @$$(PRIVATE_ARGUMENT_FILE) \
+	  -o $$@
+
+endif
+endef
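+
+# Net effect: a target copied only from 0p sources stays 0p; a target copied
+# from exactly one module inherits that module's metadata; copies drawing on
+# two different modules are an error.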
+
+###########################################################
 ## Declare the license metadata for non-module target $(1).
 ##
 ## $(2) -- license kinds e.g. SPDX-license-identifier-Apache-2.0
@@ -726,6 +787,7 @@
 $(strip \
   $(eval _tgt := $(subst //,/,$(strip $(1)))) \
   $(eval ALL_NON_MODULES += $(_tgt)) \
+  $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir,$(1))/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
   $(eval ALL_NON_MODULES.$(_tgt).LICENSE_KINDS := $(strip $(2))) \
   $(eval ALL_NON_MODULES.$(_tgt).LICENSE_CONDITIONS := $(strip $(3))) \
   $(eval ALL_NON_MODULES.$(_tgt).NOTICES := $(strip $(4))) \
@@ -766,6 +828,7 @@
 $(strip \
   $(eval _tgt := $(subst //,/,$(strip $(1)))) \
   $(eval ALL_NON_MODULES += $(_tgt)) \
+  $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir,$(1))/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
   $(eval ALL_NON_MODULES.$(_tgt).LICENSE_KINDS := $(strip $(2))) \
   $(eval ALL_NON_MODULES.$(_tgt).LICENSE_CONDITIONS := $(strip $(3))) \
   $(eval ALL_NON_MODULES.$(_tgt).NOTICES := $(strip $(4))) \
@@ -836,8 +899,9 @@
 ###########################################################
 define declare-license-deps
 $(strip \
-  $(eval _tgt := $(strip $(1))) \
+  $(eval _tgt := $(subst //,/,$(strip $(1)))) \
   $(eval ALL_NON_MODULES += $(_tgt)) \
+  $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir,$(1))/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
   $(eval ALL_NON_MODULES.$(_tgt).DEPENDENCIES := $(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES) $(2))) \
 )
 endef
@@ -852,8 +916,9 @@
 ###########################################################
 define declare-container-license-deps
 $(strip \
-  $(eval _tgt := $(strip $(1))) \
+  $(eval _tgt := $(subst //,/,$(strip $(1)))) \
   $(eval ALL_NON_MODULES += $(_tgt)) \
+  $(eval ALL_TARGETS.$(_tgt).META_LIC := $(call license-metadata-dir,$(1))/$(patsubst $(OUT_DIR)%,out%,$(_tgt)).meta_lic) \
   $(eval ALL_NON_MODULES.$(_tgt).DEPENDENCIES := $(strip $(ALL_NON_MODULES.$(_tgt).DEPENDENCIES) $(2))) \
   $(eval ALL_NON_MODULES.$(_tgt).IS_CONTAINER := true) \
   $(eval ALL_NON_MODULES.$(_tgt).ROOT_MAPPINGS := $(strip $(ALL_NON_MODULES.$(_tgt).ROOT_MAPPINGS) $(3))) \
@@ -865,12 +930,14 @@
 ###########################################################
 define report-missing-licenses-rule
 .PHONY: reportmissinglicenses
-reportmissinglicenses: PRIVATE_NON_MODULES:=$(sort $(NON_MODULES_WITHOUT_LICENSE_METADATA))
-reportmissinglicenses: PRIVATE_COPIED_FILES:=$(sort $(filter $(NON_MODULES_WITHOUT_LICENSE_METADATA),$(foreach _pair,$(PRODUCT_COPY_FILES), $(PRODUCT_OUT)/$(call word-colon,2,$(_pair)))))
+reportmissinglicenses: PRIVATE_NON_MODULES:=$(sort $(NON_MODULES_WITHOUT_LICENSE_METADATA) $(TARGETS_MISSING_LICENSE_METADATA))
+reportmissinglicenses: PRIVATE_COPIED_FILES:=$(sort $(filter $(NON_MODULES_WITHOUT_LICENSE_METADATA) $(TARGETS_MISSING_LICENSE_METADATA),\
+  $(foreach _pair,$(PRODUCT_COPY_FILES), $(PRODUCT_OUT)/$(call word-colon,2,$(_pair)))))
 reportmissinglicenses:
 	@echo Reporting $$(words $$(PRIVATE_NON_MODULES)) targets without license metadata
 	$$(foreach t,$$(PRIVATE_NON_MODULES),if ! [ -h $$(t) ]; then echo No license metadata for $$(t) >&2; fi;)
 	$$(foreach t,$$(PRIVATE_COPIED_FILES),if ! [ -h $$(t) ]; then echo No license metadata for copied file $$(t) >&2; fi;)
+	echo $$(words $$(PRIVATE_NON_MODULES)) targets missing license metadata >&2
 
 endef
 
@@ -892,7 +959,7 @@
 $(strip $(eval _all := $(call all-license-metadata)))
 
 .PHONY: reportallnoticelibrarynames
-reportallnoticelibrarynames: PRIVATE_LIST_FILE := $(call license-metadata-dir)/filelist
+reportallnoticelibrarynames: PRIVATE_LIST_FILE := $(call license-metadata-dir,COMMON)/filelist
 reportallnoticelibrarynames: | $(COMPLIANCENOTICE_SHIPPEDLIBS)
 reportallnoticelibrarynames: $(_all)
 	@echo Reporting notice library names for at least $$(words $(_all)) license metadata files
@@ -919,17 +986,12 @@
 ###########################################################
 define build-license-metadata
 $(strip \
-  $(strip $(eval _dir := $(call license-metadata-dir))) \
   $(foreach t,$(sort $(ALL_0P_TARGETS)), \
     $(eval ALL_TARGETS.$(t).META_LIC := 0p) \
   ) \
-  $(foreach t,$(sort $(ALL_NON_MODULES)), \
-    $(eval ALL_TARGETS.$(t).META_LIC := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(t).meta_lic))) \
-  ) \
+  $(foreach t,$(sort $(ALL_COPIED_TARGETS)),$(eval $(call copied-target-license-metadata-rule,$(t)))) \
   $(foreach t,$(sort $(ALL_NON_MODULES)),$(eval $(call non-module-license-metadata-rule,$(t)))) \
   $(foreach m,$(sort $(ALL_MODULES)),$(eval $(call license-metadata-rule,$(m)))) \
-  $(eval $(call report-missing-licenses-rule)) \
-  $(eval $(call report-all-notice-library-names-rule)) \
   $(eval $(call build-all-license-metadata-rule)))
 endef
 
@@ -1001,6 +1063,22 @@
 )
 endef
 
+# Uses LOCAL_MODULE_CLASS, LOCAL_MODULE, and LOCAL_IS_HOST_MODULE
+# to determine the intermediates directory.
+#
+# $(1): if non-empty, force the intermediates to be COMMON
+# $(2): if non-empty, force the intermediates to be for the 2nd arch
+# $(3): if non-empty, force the intermediates to be for the host cross os
+define local-meta-intermediates-dir
+$(strip \
+    $(if $(strip $(LOCAL_MODULE_CLASS)),, \
+        $(error $(LOCAL_PATH): LOCAL_MODULE_CLASS not defined before call to local-meta-intermediates-dir)) \
+    $(if $(strip $(LOCAL_MODULE)),, \
+        $(error $(LOCAL_PATH): LOCAL_MODULE not defined before call to local-meta-intermediates-dir)) \
+    $(call intermediates-dir-for,META$(LOCAL_MODULE_CLASS),$(LOCAL_MODULE),$(if $(strip $(LOCAL_IS_HOST_MODULE)),HOST),$(1),$(2),$(3)) \
+)
+endef
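+
+# For example (hypothetical module): with LOCAL_MODULE_CLASS := ETC and
+# LOCAL_MODULE := foo.conf, this resolves through intermediates-dir-for to a
+# METAETC/foo.conf_intermediates directory.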
+
 ###########################################################
 ## The generated sources directory.  Placing generated
 ## source files directly in the intermediates directory
@@ -2368,7 +2446,99 @@
         @$(call emit-line,$(wordlist 12001,12500,$(1)),$(2))
         @$(call emit-line,$(wordlist 12501,13000,$(1)),$(2))
         @$(call emit-line,$(wordlist 13001,13500,$(1)),$(2))
-        @$(if $(wordlist 13501,13502,$(1)),$(error Too many words ($(words $(1)))))
+        @$(call emit-line,$(wordlist 13501,14000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 14001,14500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 14501,15000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 15001,15500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 15501,16000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 16001,16500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 16501,17000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 17001,17500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 17501,18000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 18001,18500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 18501,19000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 19001,19500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 19501,20000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 20001,20500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 20501,21000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 21001,21500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 21501,22000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 22001,22500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 22501,23000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 23001,23500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 23501,24000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 24001,24500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 24501,25000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 25001,25500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 25501,26000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 26001,26500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 26501,27000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 27001,27500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 27501,28000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 28001,28500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 28501,29000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 29001,29500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 29501,30000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 30001,30500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 30501,31000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 31001,31500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 31501,32000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 32001,32500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 32501,33000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 33001,33500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 33501,34000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 34001,34500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 34501,35000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 35001,35500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 35501,36000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 36001,36500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 36501,37000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 37001,37500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 37501,38000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 38001,38500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 38501,39000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 39001,39500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 39501,40000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 40001,40500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 40501,41000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 41001,41500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 41501,42000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 42001,42500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 42501,43000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 43001,43500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 43501,44000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 44001,44500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 44501,45000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 45001,45500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 45501,46000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 46001,46500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 46501,47000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 47001,47500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 47501,48000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 48001,48500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 48501,49000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 49001,49500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 49501,50000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 50001,50500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 50501,51000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 51001,51500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 51501,52000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 52001,52500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 52501,53000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 53001,53500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 53501,54000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 54001,54500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 54501,55000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 55001,55500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 55501,56000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 56001,56500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 56501,57000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 57001,57500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 57501,58000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 58001,58500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 58501,59000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 59001,59500,$(1)),$(2))
+        @$(if $(wordlist 59501,59502,$(1)),$(error Too many words ($(words $(1)))))
 endef
 # Return jar arguments to compress files in a given directory
 # $(1): directory
@@ -2434,8 +2604,6 @@
     $(if $(PRIVATE_SRCJARS),\@$(PRIVATE_SRCJAR_LIST_FILE)) \
     || ( rm -rf $(PRIVATE_CLASS_INTERMEDIATES_DIR) ; exit 41 ) \
 fi
-$(if $(PRIVATE_JAVA_LAYERS_FILE), $(hide) build/make/tools/java-layers.py \
-    $(PRIVATE_JAVA_LAYERS_FILE) @$(PRIVATE_JAVA_SOURCE_LIST),)
 $(if $(PRIVATE_JAR_EXCLUDE_FILES), $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) \
     -name $(word 1, $(PRIVATE_JAR_EXCLUDE_FILES)) \
     $(addprefix -o -name , $(wordlist 2, 999, $(PRIVATE_JAR_EXCLUDE_FILES))) \
@@ -2566,7 +2734,7 @@
 @mkdir -p $(dir $@)tmp
 $(hide) rm -f $(dir $@)classes*.dex $(dir $@)d8_input.jar
 $(hide) $(ZIP2ZIP) -j -i $< -o $(dir $@)d8_input.jar "**/*.class"
-$(hide) $(D8_WRAPPER) $(DX_COMMAND) $(DEX_FLAGS) \
+$(hide) $(D8_WRAPPER) $(D8_COMMAND) \
     --output $(dir $@)tmp \
     $(addprefix --lib ,$(PRIVATE_D8_LIBS)) \
     --min-api $(PRIVATE_MIN_SDK_VERSION) \
@@ -2653,6 +2821,7 @@
 $(hide) mv $(1) $(1).unsigned
 $(hide) $(JAVA) -Djava.library.path=$$(dirname $(SIGNAPK_JNI_LIBRARY_PATH)) -jar $(SIGNAPK_JAR) \
     $(if $(strip $(PRIVATE_CERTIFICATE_LINEAGE)), --lineage $(PRIVATE_CERTIFICATE_LINEAGE)) \
+    $(if $(strip $(PRIVATE_ROTATION_MIN_SDK_VERSION)), --rotation-min-sdk-version $(PRIVATE_ROTATION_MIN_SDK_VERSION)) \
     $(PRIVATE_CERTIFICATE) $(PRIVATE_PRIVATE_KEY) \
     $(PRIVATE_ADDITIONAL_CERTIFICATES) $(1).unsigned $(1).signed
 $(hide) mv $(1).signed $(1)
@@ -2807,6 +2976,19 @@
 	$$(copy-file-to-target)
 endef
 
+# Define a rule to copy a license metadata file. For use via $(eval).
+# $(1): source license metadata file
+# $(2): destination license metadata file
+# $(3): built targets
+# $(4): installed targets
+define copy-one-license-metadata-file
+$(2): PRIVATE_BUILT=$(3)
+$(2): PRIVATE_INSTALLED=$(4)
+$(2): $(1)
+	@echo "Copy: $$@"
+	$$(call copy-license-metadata-file-to-target,$$(PRIVATE_BUILT),$$(PRIVATE_INSTALLED))
+endef
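+# Example invocation (hypothetical variables):
+#   $(eval $(call copy-one-license-metadata-file,$(_src_meta),$(_dest_meta),$(_built),$(_installed)))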
+
 define copy-and-uncompress-dexs
 $(2): $(1) $(ZIPALIGN) $(ZIP2ZIP)
 	@echo "Uncompress dexs in: $$@"
@@ -2855,7 +3037,7 @@
 # $(2): destination file
 define copy-init-script-file-checked
 ifdef TARGET_BUILD_UNBUNDLED
-# TODO (b/185624993): Remove the chck on TARGET_BUILD_UNBUNDLED when host_init_verifier can run
+# TODO (b/185624993): Remove the check on TARGET_BUILD_UNBUNDLED when host_init_verifier can run
 # without requiring the HIDL interface map.
 $(2): $(1)
 else ifneq ($(HOST_OS),darwin)
@@ -2994,6 +3176,17 @@
 $(hide) cp "$<" "$@"
 endef
 
+# Same as copy-file-to-target, but assume the file is a license metadata file,
+# and append the built targets from $(1) and the installed targets from $(2).
+define copy-license-metadata-file-to-target
+@mkdir -p $(dir $@)
+$(hide) rm -f $@
+$(hide) cp "$<" "$@" $(strip \
+  $(foreach b,$(1), && (grep -F 'built: "'"$(b)"'"' "$@" >/dev/null || echo 'built: "'"$(b)"'"' >>"$@")) \
+  $(foreach i,$(2), && (grep -F 'installed: "'"$(i)"'"' "$@" >/dev/null || echo 'installed: "'"$(i)"'"' >>"$@")) \
+)
+endef
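+# The grep -F guards keep the appends idempotent: a `built: "<target>"` or
+# `installed: "<target>"` line is only added when not already present.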
+
 # The same as copy-file-to-target, but use the local
 # cp command instead of acp.
 define copy-file-to-target-with-cp
@@ -3111,6 +3304,50 @@
 fi
 endef
 
+# Copy an unstripped binary to the symbols directory while also extracting
+# a hash mapping to the mapping directory.
+# $(1): unstripped intermediates file
+# $(2): path in symbols directory
+define copy-unstripped-elf-file-with-mapping
+$(call _copy-symbols-file-with-mapping,$(1),$(2),\
+  elf,$(patsubst $(TARGET_OUT_UNSTRIPPED)/%,$(call intermediates-dir-for,PACKAGING,elf_symbol_mapping)/%,$(2).textproto))
+endef
+
+# Copy an R8 dictionary to the packaging directory while also extracting
+# a hash mapping to the mapping directory.
+# $(1): unstripped intermediates file
+# $(2): path in packaging directory
+# $(3): path in mappings packaging directory
+define copy-r8-dictionary-file-with-mapping
+$(call _copy-symbols-file-with-mapping,$(1),$(2),r8,$(3))
+endef
+
+# Copy an unstripped binary or R8 dictionary to the symbols directory
+# while also extracting a hash mapping to the mapping directory.
+# $(1): unstripped intermediates file
+# $(2): path in symbols directory
+# $(3): file type (elf or r8)
+# $(4): path in the mappings directory
+define _copy-symbols-file-with-mapping
+$(2): .KATI_IMPLICIT_OUTPUTS := $(4)
+$(2): $(SYMBOLS_MAP)
+$(2): $(1)
+	@echo "Copy symbols with mapping: $$@"
+	$$(copy-file-to-target)
+	$(SYMBOLS_MAP) -$(strip $(3)) $(2) -write_if_changed $(4)
+.KATI_RESTAT: $(2)
+endef
+
+# Returns the directory to copy proguard dictionaries into
+define local-proguard-dictionary-directory
+$(call intermediates-dir-for,PACKAGING,proguard_dictionary)/out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates
+endef
+
+# Returns the directory to copy proguard dictionary mappings into
+define local-proguard-dictionary-mapping-directory
+$(call intermediates-dir-for,PACKAGING,proguard_dictionary_mapping)/out/target/common/obj/$(LOCAL_MODULE_CLASS)/$(LOCAL_MODULE)_intermediates
+endef
+
 
 ###########################################################
 ## Commands to call R8
@@ -3126,7 +3363,7 @@
 define transform-jar-to-dex-r8
 @echo R8: $@
 $(hide) rm -f $(PRIVATE_PROGUARD_DICTIONARY)
-$(hide) $(R8_WRAPPER) $(R8_COMPAT_PROGUARD) $(DEX_FLAGS) \
+$(hide) $(R8_WRAPPER) $(R8_COMMAND) \
     -injars '$<' \
     --min-api $(PRIVATE_MIN_SDK_VERSION) \
     --no-data-resources \
@@ -3273,8 +3510,6 @@
   STATIC_TEST_LIBRARY \
   HOST_STATIC_TEST_LIBRARY \
   NOTICE_FILE \
-  HOST_DALVIK_JAVA_LIBRARY \
-  HOST_DALVIK_STATIC_JAVA_LIBRARY \
   base_rules \
   HEADER_LIBRARY \
   HOST_TEST_CONFIG \
@@ -3317,12 +3552,12 @@
 define create-suite-dependencies
 $(foreach suite, $(LOCAL_COMPATIBILITY_SUITE), \
   $(eval $(if $(strip $(module_license_metadata)),\
-    $$(foreach f,$$(my_compat_dist_$(suite)),$$(eval ALL_TARGETS.$$(call word-colon,2,$$(f)).META_LIC := $(module_license_metadata))),\
-    $$(eval my_test_data += $$(foreach f,$$(my_compat_dist_$(suite)), $$(call word-colon,2,$$(f)))) \
+    $$(foreach f,$$(my_compat_dist_$(suite)),$$(call declare-copy-target-license-metadata,$$(call word-colon,2,$$(f)),$$(call word-colon,1,$$(f)))),\
+    $$(eval my_test_data += $$(my_compat_dist_$(suite))) \
   )) \
   $(eval $(if $(strip $(module_license_metadata)),\
-    $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(eval ALL_TARGETS.$$(call word-colon,2,$$(f)).META_LIC := $(module_license_metadata))),\
-    $$(eval my_test_config += $$(foreach f,$$(my_compat_dist_config_$(suite)), $$(call word-colon,2,$$(f)))) \
+    $$(foreach f,$$(my_compat_dist_config_$(suite)),$$(call declare-copy-target-license-metadata,$$(call word-colon,2,$$(f)),$$(call word-colon,1,$$(f)))),\
+    $$(eval my_test_config += $$(my_compat_dist_config_$(suite))) \
   )) \
   $(if $(filter $(suite),$(ALL_COMPATIBILITY_SUITES)),,\
     $(eval ALL_COMPATIBILITY_SUITES += $(suite)) \
@@ -3600,6 +3835,10 @@
 -include $(TOPDIR)vendor/*/build/core/definitions.mk
 -include $(TOPDIR)device/*/build/core/definitions.mk
 -include $(TOPDIR)product/*/build/core/definitions.mk
+# Also include the project-specific definitions.mk files.
+-include $(TOPDIR)vendor/*/*/build/core/definitions.mk
+-include $(TOPDIR)device/*/*/build/core/definitions.mk
+-include $(TOPDIR)product/*/*/build/core/definitions.mk
 
 # broken:
 #	$(foreach file,$^,$(if $(findstring,.a,$(suffix $file)),-l$(file),$(file)))
diff --git a/core/deprecation.mk b/core/deprecation.mk
index 2b7a869..ed4215e 100644
--- a/core/deprecation.mk
+++ b/core/deprecation.mk
@@ -3,8 +3,6 @@
   BUILD_EXECUTABLE \
   BUILD_FUZZ_TEST \
   BUILD_HEADER_LIBRARY \
-  BUILD_HOST_DALVIK_JAVA_LIBRARY \
-  BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY \
   BUILD_HOST_JAVA_LIBRARY \
   BUILD_HOST_PREBUILT \
   BUILD_JAVA_LIBRARY \
@@ -39,6 +37,8 @@
 OBSOLETE_BUILD_MODULE_TYPES :=$= \
   BUILD_AUX_EXECUTABLE \
   BUILD_AUX_STATIC_LIBRARY \
+  BUILD_HOST_DALVIK_JAVA_LIBRARY \
+  BUILD_HOST_DALVIK_STATIC_JAVA_LIBRARY \
   BUILD_HOST_FUZZ_TEST \
   BUILD_HOST_NATIVE_TEST \
   BUILD_HOST_SHARED_TEST_LIBRARY \
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index d5293cf..c11b7f4 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -96,7 +96,6 @@
   $(call add_json_list, DisablePreoptModules,                    $(DEXPREOPT_DISABLED_MODULES))
   $(call add_json_bool, OnlyPreoptBootImageAndSystemServer,      $(filter true,$(WITH_DEXPREOPT_BOOT_IMG_AND_SYSTEM_SERVER_ONLY)))
   $(call add_json_bool, PreoptWithUpdatableBcp,                  $(filter true,$(DEX_PREOPT_WITH_UPDATABLE_BCP)))
-  $(call add_json_bool, UseArtImage,                             $(filter true,$(DEXPREOPT_USE_ART_IMAGE)))
   $(call add_json_bool, DontUncompressPrivAppsDex,               $(filter true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS)))
   $(call add_json_list, ModulesLoadedByPrivilegedModules,        $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
   $(call add_json_bool, HasSystemOther,                          $(BOARD_USES_SYSTEM_OTHER_ODEX))
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index ea50313..b303b52 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -245,7 +245,7 @@
   $(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(my_optional_uses_libs_args)
   $(my_enforced_uses_libraries): PRIVATE_DEXPREOPT_CONFIGS := $(my_dexpreopt_config_args)
   $(my_enforced_uses_libraries): PRIVATE_RELAX_CHECK := $(my_relax_check_arg)
-  $(my_enforced_uses_libraries): $(AAPT)
+  $(my_enforced_uses_libraries): $(AAPT2)
   $(my_enforced_uses_libraries): $(my_verify_script)
   $(my_enforced_uses_libraries): $(my_dexpreopt_dep_configs)
   $(my_enforced_uses_libraries): $(my_manifest_or_apk)
@@ -254,7 +254,7 @@
 	$(my_verify_script) \
 	  --enforce-uses-libraries \
 	  --enforce-uses-libraries-status $@ \
-	  --aapt $(AAPT) \
+	  --aapt $(AAPT2) \
 	  $(PRIVATE_USES_LIBRARIES) \
 	  $(PRIVATE_OPTIONAL_USES_LIBRARIES) \
 	  $(PRIVATE_DEXPREOPT_CONFIGS) \
@@ -272,11 +272,13 @@
 my_dexpreopt_images_deps :=
 my_dexpreopt_image_locations_on_host :=
 my_dexpreopt_image_locations_on_device :=
+# Infix can be 'boot' or 'art'. Soong creates a set of variables for Make, one
+# for each boot image (primary and the framework extension). The only reasons
+# the primary image is exposed to Make are testing (art gtests) and benchmarking
+# (art golem benchmarks). Install rules that use those variables are in
+# dex_preopt_libart.mk. Here for dexpreopt purposes the infix is always 'boot'.
 my_dexpreopt_infix := boot
 my_create_dexpreopt_config :=
-ifeq (true, $(DEXPREOPT_USE_ART_IMAGE))
-  my_dexpreopt_infix := art
-endif
 
 ifdef LOCAL_DEX_PREOPT
   ifeq (,$(filter PRESIGNED,$(LOCAL_CERTIFICATE)))
diff --git a/core/distdir.mk b/core/distdir.mk
index aad8ff3..bce8e7f 100644
--- a/core/distdir.mk
+++ b/core/distdir.mk
@@ -45,6 +45,140 @@
     $(eval _all_dist_goal_output_pairs += $$(goal):$$(dst))))
 endef
 
+.PHONY: shareprojects
+
+define __share-projects-rule
+$(1) : PRIVATE_TARGETS := $(2)
+$(1): $(2) $(COMPLIANCE_LISTSHARE)
+	$(hide) rm -f $$@
+	mkdir -p $$(dir $$@)
+	$$(if $$(strip $$(PRIVATE_TARGETS)),OUT_DIR=$(OUT_DIR) $(COMPLIANCE_LISTSHARE) -o $$@ $$(PRIVATE_TARGETS),touch $$@)
+endef
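+
+# Each generated .shareprojects file lists the projects whose license terms
+# require sharing for the dist'd artifact; when there are no targets the file
+# is simply touched so the dist rule still has something to copy.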
+
+# build list of projects to share in $(1) for meta_lic in $(2)
+#
+# $(1): the intermediate project sharing file
+# $(2): the license metadata to base the sharing on
+define _share-projects-rule
+$(eval $(call __share-projects-rule,$(1),$(2)))
+endef
+
+.PHONY: alllicensetexts
+
+define __license-texts-rule
+$(2) : PRIVATE_GOAL := $(1)
+$(2) : PRIVATE_TARGETS := $(3)
+$(2) : PRIVATE_ROOTS := $(4)
+$(2) : PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,METAPACKAGING,licensetexts)/$(2)/arguments
+$(2): $(3) $(TEXTNOTICE)
+	$(hide) rm -f $$@
+	mkdir -p $$(dir $$@)
+	mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+	$$(if $$(strip $$(PRIVATE_TARGETS)),$$(call dump-words-to-file,\
+            -product="$$(PRIVATE_GOAL)" -title="$$(PRIVATE_GOAL)" \
+            $$(addprefix -strip_prefix ,$$(PRIVATE_ROOTS)) \
+            -strip_prefix=$(PRODUCT_OUT)/ -strip_prefix=$(HOST_OUT)/\
+            $$(PRIVATE_TARGETS),\
+            $$(PRIVATE_ARGUMENT_FILE)))
+	$$(if $$(strip $$(PRIVATE_TARGETS)),OUT_DIR=$(OUT_DIR) $(TEXTNOTICE) -o $$@ @$$(PRIVATE_ARGUMENT_FILE),touch $$@)
+endef
+
+# build list of license texts in $(2) for meta_lic in $(3) for dist goals $(1)
+# Strip `out/dist/`, which is used as a proxy for 'DIST_DIR'.
+#
+# $(1): the name of the dist goals
+# $(2): the intermediate project sharing file
+# $(3): the license metadata to base the sharing on
+define _license-texts-rule
+$(eval $(call __license-texts-rule,$(1),$(2),$(3),out/dist/))
+endef
+
+###########################################################
+## License metadata build rule for dist target $(1) with meta_lic $(2) copied from $(3)
+###########################################################
+define _dist-target-license-metadata-rule
+$(strip $(eval _meta :=$(2)))
+$(strip $(eval _dep:=))
+# 0p is the indicator for a non-copyrightable file where no party owns the copyright.
+# i.e. pure data with no copyrightable expression.
+# If all of the sources are 0p and only 0p, treat the copied file as 0p. Otherwise, all
+# of the sources must either be 0p or originate from a single metadata file to copy.
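+# For example (hypothetical): sources {a.txt -> 0p, b.txt -> 0p} yield 0p;
+# {a.txt -> 0p, libfoo -> libfoo.meta_lic} yield libfoo.meta_lic; sources from
+# two different metadata files are an error.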
+$(strip $(foreach s,$(strip $(3)),\
+  $(eval _dmeta:=$(ALL_TARGETS.$(s).META_LIC))\
+  $(if $(strip $(_dmeta)),\
+    $(if $(filter-out 0p,$(_dep)),\
+      $(if $(filter-out $(_dep) 0p,$(_dmeta)),\
+        $(error cannot copy target from multiple modules: $(1) from $(_dep) and $(_dmeta)),\
+        $(if $(filter 0p,$(_dep)),$(eval _dep:=$(_dmeta)))),\
+      $(eval _dep:=$(_dmeta))\
+    ),\
+    $(eval TARGETS_MISSING_LICENSE_METADATA += $(s) $(1)))))
+
+
+ifeq (0p,$(strip $(_dep)))
+# Not copyrightable. No encumbrances, no license text, no license kinds, etc.
+$(_meta): PRIVATE_CONDITIONS := unencumbered
+$(_meta): PRIVATE_SOURCES := $(3)
+$(_meta): PRIVATE_INSTALLED := $(1)
+# use `$(1)`, which is the unique and relatively short `out/dist/$(target)`
+$(_meta): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,METAPACKAGING,notice)/$(1)/arguments
+$(_meta): $(BUILD_LICENSE_METADATA)
+$(_meta) :
+	rm -f $$@
+	mkdir -p $$(dir $$@)
+	mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+	$$(call dump-words-to-file,\
+	    $$(addprefix -c ,$$(PRIVATE_CONDITIONS))\
+	    $$(addprefix -s ,$$(PRIVATE_SOURCES))\
+	    $$(addprefix -t ,$$(PRIVATE_TARGETS))\
+	    $$(addprefix -i ,$$(PRIVATE_INSTALLED)),\
+	    $$(PRIVATE_ARGUMENT_FILE))
+	OUT_DIR=$(OUT_DIR) $(BUILD_LICENSE_METADATA) \
+	  @$$(PRIVATE_ARGUMENT_FILE) \
+	  -o $$@
+
+else ifneq (,$(strip $(_dep)))
+# Not a missing target, copy metadata and `is_container` etc. from license metadata file `$(_dep)`
+$(_meta): PRIVATE_DEST_TARGET := $(1)
+$(_meta): PRIVATE_SOURCE_TARGETS := $(3)
+$(_meta): PRIVATE_SOURCE_METADATA := $(_dep)
+# use `$(1)`, which is the unique and relatively short `out/dist/$(target)`
+$(_meta): PRIVATE_ARGUMENT_FILE := $(call intermediates-dir-for,METAPACKAGING,copynotice)/$(1)/arguments
+$(_meta) : $(_dep) $(COPY_LICENSE_METADATA)
+	rm -f $$@
+	mkdir -p $$(dir $$@)
+	mkdir -p $$(dir $$(PRIVATE_ARGUMENT_FILE))
+	$$(call dump-words-to-file,\
+	    $$(addprefix -i ,$$(PRIVATE_DEST_TARGET))\
+	    $$(addprefix -s ,$$(PRIVATE_SOURCE_TARGETS))\
+	    $$(addprefix -d ,$$(PRIVATE_SOURCE_METADATA)),\
+	    $$(PRIVATE_ARGUMENT_FILE))
+	OUT_DIR=$(OUT_DIR) $(COPY_LICENSE_METADATA) \
+	  @$$(PRIVATE_ARGUMENT_FILE) \
+	  -o $$@
+
+endif
+endef
+
+# use `out/dist/` as a proxy for 'DIST_DIR'
+define _add_projects_to_share
+$(strip $(eval _mdir := $(call intermediates-dir-for,METAPACKAGING,meta)/out/dist)) \
+$(strip $(eval _idir := $(call intermediates-dir-for,METAPACKAGING,shareprojects))) \
+$(strip $(eval _tdir := $(call intermediates-dir-for,METAPACKAGING,licensetexts))) \
+$(strip $(eval _allt := $(sort $(foreach goal,$(_all_dist_goal_output_pairs),$(call word-colon,2,$(goal)))))) \
+$(foreach target,$(_allt), \
+  $(eval _goals := $(sort $(foreach dg,$(filter %:$(target),$(_all_dist_goal_output_pairs)),$(call word-colon,1,$(dg))))) \
+  $(eval _srcs := $(sort $(foreach sdp,$(filter %:$(target),$(_all_dist_src_dst_pairs)),$(call word-colon,1,$(sdp))))) \
+  $(eval $(call _dist-target-license-metadata-rule,out/dist/$(target),$(_mdir)/out/dist/$(target).meta_lic,$(_srcs))) \
+  $(eval _f := $(_idir)/$(target).shareprojects) \
+  $(eval _n := $(_tdir)/$(target).txt) \
+  $(eval $(call dist-for-goals,$(_goals),$(_f):shareprojects/$(target).shareprojects)) \
+  $(eval $(call dist-for-goals,$(_goals),$(_n):licensetexts/$(target).txt)) \
+  $(eval $(call _share-projects-rule,$(_f),$(foreach t, $(filter-out $(TARGETS_MISSING_LICENSE_METADATA),out/dist/$(target)),$(_mdir)/$(t).meta_lic))) \
+  $(eval $(call _license-texts-rule,$(_goals),$(_n),$(foreach t,$(filter-out $(TARGETS_MISSING_LICENSE_METADATA),out/dist/$(target)),$(_mdir)/$(t).meta_lic))) \
+)
+endef
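+
+# For example (hypothetical entry): a pair "droidcore:foo.txt" in
+# _all_dist_goal_output_pairs contributes goal "droidcore" via
+# $(call word-colon,1,...) and target "foo.txt" via $(call word-colon,2,...).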
+
 #------------------------------------------------------------------
 # To be used at the end of the build to collect all the uses of
 # dist-for-goals, and write them into a file for the packaging step to use.
@@ -52,6 +186,15 @@
 # $(1): The file to write
 define dist-write-file
 $(strip \
+  $(call _add_projects_to_share)\
+  $(if $(strip $(ANDROID_REQUIRE_LICENSE_METADATA)),\
+    $(foreach target,$(sort $(TARGETS_MISSING_LICENSE_METADATA)),$(warning target $(target) missing license metadata))\
+    $(if $(strip $(TARGETS_MISSING_LICENSE_METADATA)),\
+      $(if $(filter true error,$(ANDROID_REQUIRE_LICENSE_METADATA)),\
+        $(error $(words $(sort $(TARGETS_MISSING_LICENSE_METADATA))) targets need license metadata))))\
+  $(foreach t,$(sort $(ALL_NON_MODULES)),$(call record-missing-non-module-dependencies,$(t))) \
+  $(eval $(call report-missing-licenses-rule)) \
+  $(eval $(call report-all-notice-library-names-rule)) \
   $(KATI_obsolete_var dist-for-goals,Cannot be used after dist-write-file) \
   $(foreach goal,$(sort $(_all_dist_goals)), \
     $(eval $$(goal): _dist_$$(goal))) \
diff --git a/core/dumpconfig.mk b/core/dumpconfig.mk
index 9b1f2c2..640fe10 100644
--- a/core/dumpconfig.mk
+++ b/core/dumpconfig.mk
@@ -117,7 +117,6 @@
 	9 \
 	LOCAL_PATH \
 	MAKEFILE_LIST \
-	PARENT_PRODUCT_FILES \
 	current_mk \
 	_eiv_ev \
 	_eiv_i \
diff --git a/core/dumpvar.mk b/core/dumpvar.mk
index 6b5c030..6f3d14f 100644
--- a/core/dumpvar.mk
+++ b/core/dumpvar.mk
@@ -35,3 +35,7 @@
 	  printf "'\n";)
 
 endif # CALLED_FROM_SETUP
+
+ifneq (,$(RBC_DUMP_CONFIG_FILE))
+$(call dump-variables-rbc,$(RBC_DUMP_CONFIG_FILE))
+endif
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index 52d7ddc..0d2cd7f 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -55,9 +55,7 @@
 endif
 symbolic_input := $(inject_module)
 symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem)
-$(symbolic_output) : $(symbolic_input)
-	@echo "target Symbolic: $(PRIVATE_MODULE) ($@)"
-	$(copy-file-to-target)
+$(eval $(call copy-unstripped-elf-file-with-mapping,$(symbolic_input),$(symbolic_output)))
 
 ###########################################################
 ## Store breakpad symbols
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 62ea1b6..7dd9b12 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -135,15 +135,17 @@
   HOST_OS := darwin
 endif
 
-HOST_OS_EXTRA := $(shell uname -rsm)
-ifeq ($(HOST_OS),linux)
-  ifneq ($(wildcard /etc/os-release),)
-    HOST_OS_EXTRA += $(shell source /etc/os-release; echo $$PRETTY_NAME)
+ifeq ($(CALLED_FROM_SETUP),true)
+  HOST_OS_EXTRA := $(shell uname -rsm)
+  ifeq ($(HOST_OS),linux)
+    ifneq ($(wildcard /etc/os-release),)
+      HOST_OS_EXTRA += $(shell source /etc/os-release; echo $$PRETTY_NAME)
+    endif
+  else ifeq ($(HOST_OS),darwin)
+    HOST_OS_EXTRA += $(shell sw_vers -productVersion)
   endif
-else ifeq ($(HOST_OS),darwin)
-  HOST_OS_EXTRA += $(shell sw_vers -productVersion)
+  HOST_OS_EXTRA := $(subst $(space),-,$(HOST_OS_EXTRA))
 endif
-HOST_OS_EXTRA := $(subst $(space),-,$(HOST_OS_EXTRA))
 
 # BUILD_OS is the real host doing the build.
 BUILD_OS := $(HOST_OS)
@@ -323,23 +325,31 @@
 # likely to be relevant to the product or board configuration.
 # Soong config variables are dumped as $(call soong_config_set) calls
 # instead of the raw variable values, because mk2rbc can't read the
-# raw ones.
+# raw ones. A final sed command removes leading spaces from the output
+# file, since there is no straightforward way to strip them in pure
+# make code.
 define dump-variables-rbc
 $(eval _dump_variables_rbc_excluded := \
+  BUILD_NUMBER \
+  DATE \
   LOCAL_PATH \
+  MAKEFILE_LIST \
+  PRODUCTS \
+  PRODUCT_COPY_OUT_% \
+  RBC_PRODUCT_CONFIG \
+  RBC_BOARD_CONFIG \
+  SOONG_% \
   TOPDIR \
   TRACE_BEGIN_SOONG \
-  BOARD_PLAT_PUBLIC_SEPOLICY_DIR \
-  BOARD_PLAT_PRIVATE_SEPOLICY_DIR \
-  USER \
-  SOONG_% \
-  PRODUCT_COPY_OUT_%)\
+  USER)
+$(file >$(OUT_DIR)/dump-variables-rbc-temp.txt,$(subst $(space),$(newline),$(sort $(filter-out $(_dump_variables_rbc_excluded),$(.VARIABLES)))))
 $(file >$(1),\
-$(foreach v, $(shell echo $(filter-out $(_dump_variables_rbc_excluded),$(.VARIABLES)) | tr ' ' '\n' | grep -he "^[A-Z][A-Z0-9_]*$$"),\
+$(foreach v, $(shell grep -he "^[A-Z][A-Z0-9_]*$$" $(OUT_DIR)/dump-variables-rbc-temp.txt),\
 $(v) := $(strip $($(v)))$(newline))\
-$(foreach ns,$(SOONG_CONFIG_NAMESPACES),\
-$(foreach v,$(SOONG_CONFIG_$(ns)),\
+$(foreach ns,$(sort $(SOONG_CONFIG_NAMESPACES)),\
+$(foreach v,$(sort $(SOONG_CONFIG_$(ns))),\
 $$(call soong_config_set,$(ns),$(v),$(SOONG_CONFIG_$(ns)_$(v)))$(newline))))
+$(shell sed -i "s/^ *//g" $(1))
 endef
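+
+# The file written by dump-variables-rbc consists of plain assignments
+# followed by soong_config_set calls, e.g. (hypothetical values):
+#   PLATFORM_VERSION := 13
+#   $(call soong_config_set,my_namespace,my_var,true)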
 
 # Read the product specs so we can get TARGET_DEVICE and other
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
deleted file mode 100644
index 5eeb8ac..0000000
--- a/core/host_dalvik_java_library.mk
+++ /dev/null
@@ -1,191 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-$(call record-module-type,HOST_DALVIK_JAVA_LIBRARY)
-
-#
-# Rules for building a host dalvik java library. These libraries
-# are meant to be used by a dalvik VM instance running on the host.
-# They will be compiled against libcore and not the host JRE.
-#
-
-ifeq ($(HOST_OS),linux)
-USE_CORE_LIB_BOOTCLASSPATH := true
-
-#######################################
-include $(BUILD_SYSTEM)/host_java_library_common.mk
-#######################################
-
-full_classes_turbine_jar := $(intermediates.COMMON)/classes-turbine.jar
-full_classes_header_jarjar := $(intermediates.COMMON)/classes-header-jarjar.jar
-full_classes_header_jar := $(intermediates.COMMON)/classes-header.jar
-full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
-full_classes_combined_jar := $(intermediates.COMMON)/classes-combined.jar
-full_classes_jarjar_jar := $(intermediates.COMMON)/classes-jarjar.jar
-full_classes_jar := $(intermediates.COMMON)/classes.jar
-built_dex := $(intermediates.COMMON)/classes.dex
-java_source_list_file := $(intermediates.COMMON)/java-source-list
-
-LOCAL_INTERMEDIATE_TARGETS += \
-    $(full_classes_turbine_jar) \
-    $(full_classes_compiled_jar) \
-    $(full_classes_combined_jar) \
-    $(full_classes_jarjar_jar) \
-    $(full_classes_jar) \
-    $(built_dex) \
-    $(java_source_list_file)
-
-# See comment in java.mk
-ifndef LOCAL_CHECKED_MODULE
-ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
-LOCAL_CHECKED_MODULE := $(full_classes_compiled_jar)
-else
-LOCAL_CHECKED_MODULE := $(built_dex)
-endif
-endif
-
-#######################################
-include $(BUILD_SYSTEM)/base_rules.mk
-#######################################
-java_sources := $(addprefix $(LOCAL_PATH)/, $(filter %.java,$(LOCAL_SRC_FILES))) \
-                $(filter %.java,$(LOCAL_GENERATED_SOURCES))
-all_java_sources := $(java_sources)
-
-include $(BUILD_SYSTEM)/java_common.mk
-
-include $(BUILD_SYSTEM)/sdk_check.mk
-
-$(cleantarget): PRIVATE_CLEAN_FILES += $(intermediates.COMMON)
-
-# List of dependencies for anything that needs all java sources in place
-java_sources_deps := \
-    $(java_sources) \
-    $(java_resource_sources) \
-    $(LOCAL_SRCJARS) \
-    $(LOCAL_ADDITIONAL_DEPENDENCIES)
-
-$(java_source_list_file): $(java_sources_deps)
-	$(write-java-source-list)
-
-# TODO(b/143658984): goma can't handle the --system argument to javac.
-#$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
-$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
-$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
-$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
-$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
-$(full_classes_compiled_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
-$(full_classes_compiled_jar): PRIVATE_SRCJAR_LIST_FILE := $(intermediates.COMMON)/srcjar-list
-$(full_classes_compiled_jar): PRIVATE_SRCJAR_INTERMEDIATES_DIR := $(intermediates.COMMON)/srcjars
-$(full_classes_compiled_jar): \
-    $(java_source_list_file) \
-    $(java_sources_deps) \
-    $(full_java_header_libs) \
-    $(full_java_bootclasspath_libs) \
-    $(full_java_system_modules_deps) \
-    $(annotation_processor_deps) \
-    $(NORMALIZE_PATH) \
-    $(JAR_ARGS) \
-    $(ZIPSYNC) \
-    $(SOONG_ZIP) \
-    | $(SOONG_JAVAC_WRAPPER)
-	$(transform-host-java-to-dalvik-package)
-
-ifneq ($(TURBINE_ENABLED),false)
-
-$(full_classes_turbine_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
-$(full_classes_turbine_jar): PRIVATE_SRCJARS := $(LOCAL_SRCJARS)
-$(full_classes_turbine_jar): \
-    $(java_source_list_file) \
-    $(java_sources_deps) \
-    $(full_java_header_libs) \
-    $(full_java_bootclasspath_libs) \
-    $(NORMALIZE_PATH) \
-    $(JAR_ARGS) \
-    $(ZIPTIME) \
-    | $(TURBINE) \
-    $(MERGE_ZIPS)
-	$(transform-java-to-header.jar)
-
-.KATI_RESTAT: $(full_classes_turbine_jar)
-
-# Run jarjar before generate classes-header.jar if necessary.
-ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
-$(full_classes_header_jarjar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
-$(full_classes_header_jarjar): $(full_classes_turbine_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
-	$(call transform-jarjar)
-else
-full_classes_header_jarjar := $(full_classes_turbine_jar)
-endif
-
-$(eval $(call copy-one-file,$(full_classes_header_jarjar),$(full_classes_header_jar)))
-
-endif # TURBINE_ENABLED != false
-
-$(full_classes_combined_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
-$(full_classes_combined_jar): $(full_classes_compiled_jar) \
-                              $(jar_manifest_file) \
-                              $(full_static_java_libs)  | $(MERGE_ZIPS)
-	$(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
-            $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
-	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
-            $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
-            $@ $< $(PRIVATE_STATIC_JAVA_LIBRARIES)
-
-# Run jarjar if necessary, otherwise just copy the file.
-ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
-$(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
-$(full_classes_jarjar_jar): $(full_classes_combined_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
-	$(call transform-jarjar)
-else
-full_classes_jarjar_jar := $(full_classes_combined_jar)
-endif
-
-$(eval $(call copy-one-file,$(full_classes_jarjar_jar),$(full_classes_jar)))
-
-ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
-# No dex; all we want are the .class files with resources.
-$(LOCAL_BUILT_MODULE) : $(java_resource_sources)
-$(LOCAL_BUILT_MODULE) : $(full_classes_jar)
-	@echo "host Static Jar: $(PRIVATE_MODULE) ($@)"
-	$(copy-file-to-target)
-
-else # !LOCAL_IS_STATIC_JAVA_LIBRARY
-$(built_dex): PRIVATE_INTERMEDIATES_DIR := $(intermediates.COMMON)
-$(built_dex): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(built_dex): $(full_classes_jar) $(DX) $(ZIP2ZIP)
-	$(transform-classes.jar-to-dex)
-
-$(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
-$(LOCAL_BUILT_MODULE): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
-$(LOCAL_BUILT_MODULE): $(MERGE_ZIPS) $(SOONG_ZIP) $(ZIP2ZIP)
-$(LOCAL_BUILT_MODULE): $(built_dex) $(java_resource_sources)
-	@echo "Host Jar: $(PRIVATE_MODULE) ($@)"
-	rm -rf $@.parts
-	mkdir -p $@.parts
-	$(call create-dex-jar,$@.parts/dex.zip,$(PRIVATE_DEX_FILE))
-	$(call extract-resources-jar,$@.parts/res.zip,$(PRIVATE_SOURCE_ARCHIVE))
-	$(MERGE_ZIPS) -j $@ $@.parts/dex.zip $@.parts/res.zip
-	rm -rf $@.parts
-
-endif # !LOCAL_IS_STATIC_JAVA_LIBRARY
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_DEFAULT_APP_TARGET_SDK := $(call module-target-sdk-version)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SDK_VERSION := $(call module-sdk-version)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MIN_SDK_VERSION := $(call codename-or-sdk-to-sdk,$(call module-min-sdk-version))
-
-USE_CORE_LIB_BOOTCLASSPATH :=
-
-endif
diff --git a/core/host_dalvik_static_java_library.mk b/core/host_dalvik_static_java_library.mk
deleted file mode 100644
index 78faf73..0000000
--- a/core/host_dalvik_static_java_library.mk
+++ /dev/null
@@ -1,28 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-$(call record-module-type,HOST_DALVIK_STATIC_JAVA_LIBRARY)
-
-#
-# Rules for building a host dalvik static java library.
-# These libraries will be compiled against libcore and not the host
-# JRE.
-#
-LOCAL_UNINSTALLABLE_MODULE := true
-LOCAL_IS_STATIC_JAVA_LIBRARY := true
-
-include $(BUILD_SYSTEM)/host_dalvik_java_library.mk
-
-LOCAL_IS_STATIC_JAVA_LIBRARY :=
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index 0f95202..89aa53c 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -56,10 +56,6 @@
 
 include $(BUILD_SYSTEM)/java_common.mk
 
-# The layers file allows you to enforce a layering between java packages.
-# Run build/make/tools/java-layers.py for more details.
-layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
-
 # List of dependencies for anything that needs all java sources in place
 java_sources_deps := \
     $(java_sources) \
@@ -72,7 +68,6 @@
 
 # TODO(b/143658984): goma can't handle the --system argument to javac.
 #$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
-$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
 $(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index 289d16f..5491247 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -5,6 +5,7 @@
 #   my_prebuilt_jni_libs
 #   my_installed_module_stem (from configure_module_stem.mk)
 #   partition_tag (from base_rules.mk)
+#   partition_lib_pairs
 #   my_prebuilt_src_file (from prebuilt_internal.mk)
 #
 # Output variables:
@@ -66,13 +67,32 @@
   ifeq ($(filter address,$(SANITIZE_TARGET)),)
     my_symlink_target_dir := $(patsubst $(PRODUCT_OUT)%,%,\
       $(my_shared_library_path))
-    $(foreach lib,$(my_jni_filenames),\
-      $(call symlink-file, \
-        $(my_shared_library_path)/$(lib), \
-        $(my_symlink_target_dir)/$(lib), \
-        $(my_app_lib_path)/$(lib)) \
-      $(eval $$(LOCAL_INSTALLED_MODULE) : $$(my_app_lib_path)/$$(lib)) \
-      $(eval ALL_MODULES.$(my_register_name).INSTALLED += $$(my_app_lib_path)/$$(lib)))
+
+    ifdef partition_lib_pairs
+      # Support cross-partition jni lib dependencies for bp modules.
+      # The API domain check is done in Soong.
+      $(foreach pl_pair,$(partition_lib_pairs),\
+        $(eval lib_name := $(call word-colon, 1, $(pl_pair)))\
+        $(eval lib_partition := $(call word-colon, 2, $(pl_pair)))\
+        $(eval shared_library_path := $(call get_non_asan_path,\
+        $($(my_2nd_arch_prefix)TARGET_OUT$(lib_partition)_SHARED_LIBRARIES)))\
+        $(call symlink-file,\
+          $(shared_library_path)/$(lib_name).so,\
+          $(my_symlink_target_dir)/$(lib_name).so,\
+          $(my_app_lib_path)/$(lib_name).so)\
+        $(eval $$(LOCAL_INSTALLED_MODULE) : $$(my_app_lib_path)/$$(lib_name).so)\
+        $(eval ALL_MODULES.$(my_register_name).INSTALLED += $$(my_app_lib_path)/$$(lib_name).so))
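+
+      # For example (hypothetical entry): a partition_lib_pairs element
+      # "libjni_foo:_VENDOR" yields lib_name=libjni_foo and
+      # lib_partition=_VENDOR, so the symlink resolves against
+      # $(TARGET_OUT_VENDOR_SHARED_LIBRARIES) (modulo the arch prefix).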
+
+    else
+      # Cross-partition jni lib dependencies are currently not supported for mk modules.
+      $(foreach lib,$(my_jni_filenames),\
+        $(call symlink-file, \
+          $(my_shared_library_path)/$(lib), \
+          $(my_symlink_target_dir)/$(lib), \
+          $(my_app_lib_path)/$(lib)) \
+        $(eval $$(LOCAL_INSTALLED_MODULE) : $$(my_app_lib_path)/$$(lib)) \
+        $(eval ALL_MODULES.$(my_register_name).INSTALLED += $$(my_app_lib_path)/$$(lib)))
+    endif # partition_lib_pairs
   endif
 
   # Clear jni_shared_libraries to not embed it into the apk.
diff --git a/core/java.mk b/core/java.mk
index 123cbe8..b13ef4d 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -200,10 +200,6 @@
 $(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_stubs_jar)))
 ALL_MODULES.$(my_register_name).STUBS := $(full_classes_stubs_jar)
 
-# The layers file allows you to enforce a layering between java packages.
-# Run build/make/tools/java-layers.py for more details.
-layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
-$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
 $(full_classes_compiled_jar): PRIVATE_WARNINGS_ENABLE := $(LOCAL_WARNINGS_ENABLE)
 
 # Compile the java files to a .jar file.
@@ -266,6 +262,7 @@
 
 # TODO(b/143658984): goma can't handle the --system argument to javac.
 #$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(GOMA_POOL)
+$(full_classes_compiled_jar): .KATI_NINJA_POOL := $(JAVAC_NINJA_POOL)
 $(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS) $(annotation_processor_flags)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
@@ -489,15 +486,17 @@
 $(built_dex_intermediate): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
 
 ifdef LOCAL_PROGUARD_ENABLED
+  $(built_dex_intermediate): .KATI_NINJA_POOL := $(R8_NINJA_POOL)
   $(built_dex_intermediate): PRIVATE_EXTRA_INPUT_JAR := $(extra_input_jar)
   $(built_dex_intermediate): PRIVATE_PROGUARD_FLAGS := $(legacy_proguard_flags) $(common_proguard_flags) $(LOCAL_PROGUARD_FLAGS)
   $(built_dex_intermediate): PRIVATE_PROGUARD_DICTIONARY := $(proguard_dictionary)
-  $(built_dex_intermediate) : $(full_classes_pre_proguard_jar) $(extra_input_jar) $(my_proguard_sdk_raise) $(common_proguard_flag_files) $(legacy_proguard_lib_deps) $(R8_COMPAT_PROGUARD) $(LOCAL_PROGUARD_FLAGS_DEPS)
+  $(built_dex_intermediate) : $(full_classes_pre_proguard_jar) $(extra_input_jar) $(my_proguard_sdk_raise) $(common_proguard_flag_files) $(legacy_proguard_lib_deps) $(R8) $(LOCAL_PROGUARD_FLAGS_DEPS)
 	$(transform-jar-to-dex-r8)
 else # !LOCAL_PROGUARD_ENABLED
+  $(built_dex_intermediate): .KATI_NINJA_POOL := $(D8_NINJA_POOL)
   $(built_dex_intermediate): PRIVATE_D8_LIBS := $(full_java_bootclasspath_libs) $(full_shared_java_header_libs)
   $(built_dex_intermediate): $(full_java_bootclasspath_libs) $(full_shared_java_header_libs)
-  $(built_dex_intermediate): $(full_classes_pre_proguard_jar) $(DX) $(ZIP2ZIP)
+  $(built_dex_intermediate): $(full_classes_pre_proguard_jar) $(D8) $(ZIP2ZIP)
 	$(transform-classes.jar-to-dex)
 endif
 
diff --git a/core/layoutlib_fonts.mk b/core/layoutlib_fonts.mk
new file mode 100644
index 0000000..d2a814f
--- /dev/null
+++ b/core/layoutlib_fonts.mk
@@ -0,0 +1,35 @@
+# Fonts for layoutlib
+
+FONT_TEMP := $(call intermediates-dir-for,PACKAGING,fonts,HOST,COMMON)
+
+# The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
+font_config := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
+font_config := $(addprefix $(FONT_TEMP)/, $(notdir $(font_config)))
+
+$(font_config): $(FONT_TEMP)/%.xml: \
+			frameworks/base/data/fonts/%.xml
+	$(hide) mkdir -p $(dir $@)
+	$(hide) cp -vf $< $@
+
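+# For example (hypothetical file): frameworks/base/data/fonts/fonts.xml would
+# be copied to $(FONT_TEMP)/fonts.xml by the static pattern rule above.
+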
+# List of fonts on the device that we want to ship. This is all .ttf, .ttc and .otf fonts.
+fonts_device := $(filter $(TARGET_OUT)/fonts/%.ttf $(TARGET_OUT)/fonts/%.ttc $(TARGET_OUT)/fonts/%.otf, $(INTERNAL_SYSTEMIMAGE_FILES))
+fonts_device := $(addprefix $(FONT_TEMP)/, $(notdir $(fonts_device)))
+
+# TODO: If the font file is a symlink, reuse the font renamed from the symlink
+# target.
+$(fonts_device): $(FONT_TEMP)/%: $(TARGET_OUT)/fonts/%
+	$(hide) mkdir -p $(dir $@)
+	$(hide) cp -vf $< $@
+
+# List of all dependencies - all fonts and configuration files.
+FONT_FILES := $(fonts_device) $(font_config)
+
+.PHONY: layoutlib layoutlib-tests
+layoutlib layoutlib-tests: $(FONT_FILES)
+
+$(call dist-for-goals, layoutlib, $(foreach m,$(FONT_FILES), $(m):layoutlib_native/fonts/$(notdir $(m))))
+
+FONT_TEMP :=
+font_config :=
+fonts_device :=
+FONT_FILES :=
diff --git a/core/main.mk b/core/main.mk
index 72958da..2e39601 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -460,6 +460,9 @@
 
 ADDITIONAL_SYSTEM_PROPERTIES += net.bt.name=Android
 
+# This property is set by flashing a debug boot image, so default it to false.
+ADDITIONAL_SYSTEM_PROPERTIES += ro.force.debuggable=0
+
 # ------------------------------------------------------------
 # Define a function that, given a list of module tags, returns
 # non-empty if that module should be installed in /system.
@@ -931,10 +934,11 @@
     $(eval my_deps := $(call get-all-shared-libs-deps,$(m)))\
     $(foreach dep,$(my_deps),\
       $(foreach f,$(ALL_MODULES.$(dep).HOST_SHARED_LIBRARY_FILES),\
-        $(if $(filter $(suite),device-tests general-tests),\
+        $(if $(filter $(suite),device-tests general-tests art-host-tests host-unit-tests),\
           $(eval my_testcases := $(HOST_OUT_TESTCASES)),\
           $(eval my_testcases := $$(COMPATIBILITY_TESTCASES_OUT_$(suite))))\
         $(eval target := $(my_testcases)/$(lastword $(subst /, ,$(dir $(f))))/$(notdir $(f)))\
+        $(if $(strip $(ALL_TARGETS.$(target).META_LIC)),,$(call declare-copy-target-license-metadata,$(target),$(f)))\
         $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
           $$(COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES) $(f):$(target))\
         $(eval COMPATIBILITY.$(suite).HOST_SHARED_LIBRARY.FILES := \
@@ -1234,42 +1238,14 @@
 #   See the select-bitness-of-required-modules definition.
 # $(1): product makefile
 
-# TODO(asmundak):
-# `product-installed-files` and `host-installed-files` macros below used to
-# call `get-product-var` directly to obtain per-file configuration variable
-# values (the value of variable FOO is fetched from PRODUCT.<product-makefile>.FOO).
-# Starlark-based configuration does not maintain per-file variable variable
-# values. To work around this problem, we utilize the fact that
-# `product-installed-files` and `host-installed-files` are called only in
-# two places:
-# 1. For the top-level product makefile (in this file). In this case
-#    $(call get-product-var <product>, FOO) is the same as $(FOO) as the
-#    product configuration has been run already. Therefore we define
-#    _product-var macro to pick the values directly from product config
-#    variables when using Starlark-based configuration.
-# 2. To check the path requirements (in artifact_path_requirements.mk).
-#    Starlark-based configuration does not perform this check at the moment.
-# In the longer run most of the logic of this file will be moved to the
-# Starlark.
-
-ifndef RBC_PRODUCT_CONFIG
-define _product-var
-  $(call get-product-var,$(1),$(2))
-endef
-else
-define _product-var
-  $(call $(2))
-endef
-endif
-
 define product-installed-files
   $(eval _pif_modules := \
-    $(call _product-var,$(1),PRODUCT_PACKAGES) \
-    $(if $(filter eng,$(tags_to_install)),$(call _product-var,$(1),PRODUCT_PACKAGES_ENG)) \
-    $(if $(filter debug,$(tags_to_install)),$(call _product-var,$(1),PRODUCT_PACKAGES_DEBUG)) \
-    $(if $(filter tests,$(tags_to_install)),$(call _product-var,$(1),PRODUCT_PACKAGES_TESTS)) \
-    $(if $(filter asan,$(tags_to_install)),$(call _product-var,$(1),PRODUCT_PACKAGES_DEBUG_ASAN)) \
-    $(if $(filter java_coverage,$(tags_to_install)),$(call _product-var,$(1),PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE)) \
+    $(call get-product-var,$(1),PRODUCT_PACKAGES) \
+    $(if $(filter eng,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_ENG)) \
+    $(if $(filter debug,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG)) \
+    $(if $(filter tests,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_TESTS)) \
+    $(if $(filter asan,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_ASAN)) \
+    $(if $(filter java_coverage,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE)) \
     $(call auto-included-modules) \
   ) \
   $(eval ### Filter out the overridden packages and executables before doing expansion) \
@@ -1280,13 +1256,13 @@
   $(call expand-required-modules,_pif_modules,$(_pif_modules),$(_pif_overrides)) \
   $(filter-out $(HOST_OUT_ROOT)/%,$(call module-installed-files, $(_pif_modules))) \
   $(call resolve-product-relative-paths,\
-    $(foreach cf,$(call _product-var,$(1),PRODUCT_COPY_FILES),$(call word-colon,2,$(cf))))
+    $(foreach cf,$(call get-product-var,$(1),PRODUCT_COPY_FILES),$(call word-colon,2,$(cf))))
 endef
 
 # Similar to product-installed-files above, but handles PRODUCT_HOST_PACKAGES instead
 # This does support the :32 / :64 syntax, but does not support module overrides.
 define host-installed-files
-  $(eval _hif_modules := $(call _product-var,$(1),PRODUCT_HOST_PACKAGES)) \
+  $(eval _hif_modules := $(call get-product-var,$(1),PRODUCT_HOST_PACKAGES)) \
   $(eval ### Split host vs host cross modules) \
   $(eval _hcif_modules := $(filter host_cross_%,$(_hif_modules))) \
   $(eval _hif_modules := $(filter-out host_cross_%,$(_hif_modules))) \
@@ -1375,7 +1351,7 @@
 
   # Verify the artifact path requirements made by included products.
   is_asan := $(if $(filter address,$(SANITIZE_TARGET)),true)
-  ifeq (,$(or $(is_asan),$(DISABLE_ARTIFACT_PATH_REQUIREMENTS),$(RBC_PRODUCT_CONFIG),$(RBC_BOARD_CONFIG)))
+  ifeq (,$(or $(is_asan),$(DISABLE_ARTIFACT_PATH_REQUIREMENTS)))
     include $(BUILD_SYSTEM)/artifact_path_requirements.mk
   endif
 else
@@ -1483,12 +1459,6 @@
 # fix-notice-deps replaces those unadorned module names with every built variant.
 $(call fix-notice-deps)
 
-# Create a license metadata rule per module. Could happen in base_rules.mk or
-# notice_files.mk; except, it has to happen after fix-notice-deps to avoid
-# missing dependency errors.
-$(call build-license-metadata)
-
-
 # These are additional goals that we build, in order to make sure that there
 # is as little code as possible in the tree that doesn't build.
 modules_to_check := $(foreach m,$(ALL_MODULES),$(ALL_MODULES.$(m).CHECKED))
@@ -1761,16 +1731,20 @@
   endif
 
   $(PROGUARD_DICT_ZIP) : $(apps_only_installed_files)
-  $(call dist-for-goals,apps_only, $(PROGUARD_DICT_ZIP))
+  $(call dist-for-goals,apps_only, $(PROGUARD_DICT_ZIP) $(PROGUARD_DICT_MAPPING))
+  $(call declare-container-license-deps,$(PROGUARD_DICT_ZIP),$(apps_only_installed_files),$(PRODUCT_OUT)/:/)
 
   $(PROGUARD_USAGE_ZIP) : $(apps_only_installed_files)
   $(call dist-for-goals,apps_only, $(PROGUARD_USAGE_ZIP))
+  $(call declare-container-license-deps,$(PROGUARD_USAGE_ZIP),$(apps_only_installed_files),$(PRODUCT_OUT)/:/)
 
   $(SYMBOLS_ZIP) : $(apps_only_installed_files)
-  $(call dist-for-goals,apps_only, $(SYMBOLS_ZIP))
+  $(call dist-for-goals,apps_only, $(SYMBOLS_ZIP) $(SYMBOLS_MAPPING))
+  $(call declare-container-license-deps,$(SYMBOLS_ZIP),$(apps_only_installed_files),$(PRODUCT_OUT)/:/)
 
   $(COVERAGE_ZIP) : $(apps_only_installed_files)
   $(call dist-for-goals,apps_only, $(COVERAGE_ZIP))
+  $(call declare-container-license-deps,$(COVERAGE_ZIP),$(apps_only_installed_files),$(PRODUCT_OUT)/:/)
 
 apps_only: $(unbundled_build_modules)
 
@@ -1818,7 +1792,9 @@
     $(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET) \
     $(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET) \
     $(SYMBOLS_ZIP) \
+    $(SYMBOLS_MAPPING) \
     $(PROGUARD_DICT_ZIP) \
+    $(PROGUARD_DICT_MAPPING) \
     $(PROGUARD_USAGE_ZIP) \
     $(COVERAGE_ZIP) \
     $(INSTALLED_FILES_FILE) \
@@ -1870,30 +1846,28 @@
     $(INSTALLED_FILES_JSON_ROOT) \
   )
 
-  ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-    $(call dist-for-goals, droidcore-unbundled, \
-      $(INSTALLED_FILES_FILE_RAMDISK) \
-      $(INSTALLED_FILES_JSON_RAMDISK) \
-      $(INSTALLED_FILES_FILE_DEBUG_RAMDISK) \
-      $(INSTALLED_FILES_JSON_DEBUG_RAMDISK) \
-      $(INSTALLED_FILES_FILE_VENDOR_RAMDISK) \
-      $(INSTALLED_FILES_JSON_VENDOR_RAMDISK) \
-      $(INSTALLED_FILES_FILE_VENDOR_KERNEL_RAMDISK) \
-      $(INSTALLED_FILES_JSON_VENDOR_KERNEL_RAMDISK) \
-      $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK) \
-      $(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK) \
-      $(INSTALLED_DEBUG_RAMDISK_TARGET) \
-      $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
-      $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET) \
-      $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET) \
-      $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
-      $(INSTALLED_VENDOR_TEST_HARNESS_RAMDISK_TARGET) \
-      $(INSTALLED_VENDOR_TEST_HARNESS_BOOTIMAGE_TARGET) \
-      $(INSTALLED_VENDOR_RAMDISK_TARGET) \
-      $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
-      $(INSTALLED_VENDOR_KERNEL_RAMDISK_TARGET) \
-    )
-  endif
+  $(call dist-for-goals, droidcore-unbundled, \
+    $(INSTALLED_FILES_FILE_RAMDISK) \
+    $(INSTALLED_FILES_JSON_RAMDISK) \
+    $(INSTALLED_FILES_FILE_DEBUG_RAMDISK) \
+    $(INSTALLED_FILES_JSON_DEBUG_RAMDISK) \
+    $(INSTALLED_FILES_FILE_VENDOR_RAMDISK) \
+    $(INSTALLED_FILES_JSON_VENDOR_RAMDISK) \
+    $(INSTALLED_FILES_FILE_VENDOR_KERNEL_RAMDISK) \
+    $(INSTALLED_FILES_JSON_VENDOR_KERNEL_RAMDISK) \
+    $(INSTALLED_FILES_FILE_VENDOR_DEBUG_RAMDISK) \
+    $(INSTALLED_FILES_JSON_VENDOR_DEBUG_RAMDISK) \
+    $(INSTALLED_DEBUG_RAMDISK_TARGET) \
+    $(INSTALLED_DEBUG_BOOTIMAGE_TARGET) \
+    $(INSTALLED_TEST_HARNESS_RAMDISK_TARGET) \
+    $(INSTALLED_TEST_HARNESS_BOOTIMAGE_TARGET) \
+    $(INSTALLED_VENDOR_DEBUG_BOOTIMAGE_TARGET) \
+    $(INSTALLED_VENDOR_TEST_HARNESS_RAMDISK_TARGET) \
+    $(INSTALLED_VENDOR_TEST_HARNESS_BOOTIMAGE_TARGET) \
+    $(INSTALLED_VENDOR_RAMDISK_TARGET) \
+    $(INSTALLED_VENDOR_DEBUG_RAMDISK_TARGET) \
+    $(INSTALLED_VENDOR_KERNEL_RAMDISK_TARGET) \
+  )
 
   ifeq ($(PRODUCT_EXPORT_BOOT_IMAGE_TO_DIST),true)
     $(call dist-for-goals, droidcore-unbundled, $(INSTALLED_BOOTIMAGE_TARGET))
@@ -1922,6 +1896,8 @@
 	$(hide) mkdir -p $(dir $@)
 	$(hide) $(APICHECK_COMMAND) --input-api-jar $< --api-xml $@
 
+  $(foreach xml,$(sort $(api_xmls)),$(call declare-1p-target,$(xml),))
+
   $(call dist-for-goals, dist_files, $(api_xmls))
   api_xmls :=
 
@@ -1961,9 +1937,6 @@
 sdk: $(ALL_SDK_TARGETS)
 $(call dist-for-goals,sdk, \
     $(ALL_SDK_TARGETS) \
-    $(SYMBOLS_ZIP) \
-    $(COVERAGE_ZIP) \
-    $(APPCOMPAT_ZIP) \
     $(INSTALLED_BUILD_PROP_TARGET) \
 )
 endif
@@ -2033,6 +2006,11 @@
   droidcore: ${APEX_ALLOWED_DEPS_CHECK}
 endif
 
+# Create a license metadata rule per module. Could happen in base_rules.mk or
+# notice_files.mk; except, it has to happen after fix-notice-deps to avoid
+# missing dependency errors.
+$(call build-license-metadata)
+
 $(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
 
 $(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] writing build rules ...)
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index 2157c9e..2b5ceee 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -25,7 +25,7 @@
 	cts \
 	custom_images \
 	dicttool_aosp \
-	dump-products \
+	docs \
 	eng \
 	oem_image \
 	online-system-api-sdk-docs \
diff --git a/core/node_fns.mk b/core/node_fns.mk
index 2243cd7..144eb8b 100644
--- a/core/node_fns.mk
+++ b/core/node_fns.mk
@@ -83,27 +83,17 @@
 # If needle appears multiple times, only the first occurrence
 # will survive.
 #
-# How it works:
-#
-# - Stick everything in haystack into a single word,
-#   with "|||" separating the words.
-# - Replace occurrances of "|||$(needle)|||" with "||| |||",
-#   breaking haystack back into multiple words, with spaces
-#   where needle appeared.
-# - Add needle between the first and second words of haystack.
-# - Replace "|||" with spaces, breaking haystack back into
-#   individual words.
-#
 define uniq-word
 $(strip \
   $(if $(filter-out 0 1,$(words $(filter $(2),$(1)))), \
-    $(eval h := |||$(subst $(space),|||,$(strip $(1)))|||) \
-    $(eval h := $(subst |||$(strip $(2))|||,|||$(space)|||,$(h))) \
-    $(eval h := $(word 1,$(h)) $(2) $(wordlist 2,9999,$(h))) \
-    $(subst |||,$(space),$(h)) \
-   , \
-    $(1) \
- ))
+    $(eval _uniq_word_seen :=) \
+    $(foreach w,$(1), \
+      $(if $(filter $(2),$(w)), \
+        $(if $(_uniq_word_seen),, \
+          $(w) \
+          $(eval _uniq_word_seen := true)), \
+        $(w))), \
+  $(1)))
 endef
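+
+# For example (hypothetical values):
+#   $(call uniq-word,libfoo libbar libfoo libbaz,libfoo)
+# expands to "libfoo libbar libbaz": only the first occurrence of the
+# needle survives.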
 
 INHERIT_TAG := @inherit:
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 8b2dade..a5852cc 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -11,6 +11,8 @@
 
 ifneq (,$(strip $(LOCAL_LICENSE_PACKAGE_NAME)))
 license_package_name:=$(strip $(LOCAL_LICENSE_PACKAGE_NAME))
+else
+license_package_name:=
 endif
 
 ifneq (,$(strip $(LOCAL_LICENSE_INSTALL_MAP)))
@@ -125,16 +127,21 @@
 module_license_metadata :=
 
 ifdef my_register_name
-  module_license_metadata := $(call local-intermediates-dir)/$(my_register_name).meta_lic
+  module_license_metadata := $(call local-meta-intermediates-dir)/$(my_register_name).meta_lic
 
-  $(foreach target,$(ALL_MODULES.$(my_register_name).BUILT) $(ALL_MODULES.$(my_register_name).INSTALLED) $(my_test_data) $(my_test_config),\
+  $(foreach target,$(ALL_MODULES.$(my_register_name).BUILT) $(ALL_MODULES.$(my_register_name).INSTALLED) $(foreach bi,$(LOCAL_SOONG_BUILT_INSTALLED),$(call word-colon,1,$(bi))),\
     $(eval ALL_TARGETS.$(target).META_LIC := $(module_license_metadata)))
 
+  $(foreach f,$(my_test_data) $(my_test_config),\
+    $(if $(strip $(ALL_TARGETS.$(call word-colon,1,$(f)).META_LIC)), \
+      $(call declare-copy-target-license-metadata,$(call word-colon,2,$(f)),$(call word-colon,1,$(f))), \
+      $(eval ALL_TARGETS.$(call word-colon,2,$(f)).META_LIC := $(module_license_metadata))))
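+
+  # For example (hypothetical pair): a my_test_data entry "src.txt:dest.txt"
+  # either copies the license metadata already declared for src.txt to
+  # dest.txt, or falls back to this module's metadata for dest.txt.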
+
   ALL_MODULES.$(my_register_name).META_LIC := $(strip $(ALL_MODULES.$(my_register_name).META_LIC) $(module_license_metadata))
 
   ifdef LOCAL_SOONG_LICENSE_METADATA
     # Soong modules have already produced a license metadata file, copy it to where Make expects it.
-    $(eval $(call copy-one-file, $(LOCAL_SOONG_LICENSE_METADATA), $(module_license_metadata)))
+    $(eval $(call copy-one-license-metadata-file, $(LOCAL_SOONG_LICENSE_METADATA), $(module_license_metadata),$(ALL_MODULES.$(my_register_name).BUILT),$(ALL_MODULES.$(my_register_name).INSTALLED)))
   else
     # Make modules don't have enough information to produce a license metadata rule until after fix-notice-deps
     # has been called, store the necessary information until later.
@@ -157,102 +164,4 @@
 ALL_MODULES.$(my_register_name).NOTICES := $(ALL_MODULES.$(my_register_name).NOTICES) $(notice_file)
 endif
 
-# This relies on the name of the directory in PRODUCT_OUT matching where
-# it's installed on the target - i.e. system, data, etc.  This does
-# not work for root and isn't exact, but it's probably good enough for
-# compliance.
-# Includes the leading slash
-ifdef LOCAL_INSTALLED_MODULE
-  module_installed_filename := $(patsubst $(PRODUCT_OUT)/%,%,$(LOCAL_INSTALLED_MODULE))
-else
-  # This module isn't installable
-  ifneq ($(filter  STATIC_LIBRARIES RLIB_LIBRARIES PROC_MACRO_LIBRARIES HEADER_LIBRARIES,$(LOCAL_MODULE_CLASS)),)
-    # Stick the static libraries with the dynamic libraries.
-    # We can't use xxx_OUT_STATIC_LIBRARIES because it points into
-    # device-obj or host-obj.
-    module_installed_filename := \
-        $(patsubst $(PRODUCT_OUT)/%,%,$($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_SHARED_LIBRARIES))/$(notdir $(LOCAL_BUILT_MODULE))
-  else ifeq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
-    # Shared modules may be uninstallable(e.g. TARGET_SKIP_CURRENT_VNDK=true)
-    module_installed_filename :=
-  else
-    ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
-      # Stick the static java libraries with the regular java libraries.
-      module_leaf := $(notdir $(LOCAL_BUILT_MODULE))
-      # javalib.jar is the default name for the build module (and isn't meaningful)
-      # If that's what we have, substitute the module name instead.  These files
-      # aren't included on the device, so this name is synthetic anyway.
-      # Extra path "static" is added to try to avoid name conflict between the notice file of
-      # this 'uninstallable' Java module and the notice file for another 'installable' Java module
-      # whose stem is the same as this module's name.
-      ifneq ($(filter javalib.jar,$(module_leaf)),)
-        module_leaf := static/$(LOCAL_MODULE).jar
-      endif
-      module_installed_filename := \
-          $(patsubst $(PRODUCT_OUT)/%,%,$($(my_prefix)OUT_JAVA_LIBRARIES))/$(module_leaf)
-    else ifneq ($(filter ETC DATA,$(LOCAL_MODULE_CLASS)),)
-      # ETC and DATA modules may be uninstallable, yet still have a NOTICE file.
-      # e.g. apex components
-      module_installed_filename :=
-    else ifneq (,$(and $(filter %.sdk,$(LOCAL_MODULE)),$(filter $(patsubst %.sdk,%,$(LOCAL_MODULE)),$(SOONG_SDK_VARIANT_MODULES))))
-      # Soong produces uninstallable *.sdk shared libraries for embedding in APKs.
-      module_installed_filename := \
-          $(patsubst $(PRODUCT_OUT)/%,%,$($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_SHARED_LIBRARIES))/$(notdir $(LOCAL_BUILT_MODULE))
-    endif # JAVA_LIBRARIES
-  endif # STATIC_LIBRARIES
-endif
-
-ifdef module_installed_filename
-
-# In case it's actually a host file
-module_installed_filename := $(patsubst $(HOST_OUT)/%,%,$(module_installed_filename))
-module_installed_filename := $(patsubst $(HOST_CROSS_OUT)/%,%,$(module_installed_filename))
-
-installed_notice_file := $($(my_prefix)OUT_NOTICE_FILES)/src/$(module_installed_filename).txt
-
-$(installed_notice_file): $(module_license_metadata)
-
-ifdef my_register_name
-ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE := $(ALL_MODULES.$(my_register_name).INSTALLED_NOTICE_FILE) $(installed_notice_file)
-ALL_MODULES.$(my_register_name).MODULE_INSTALLED_FILENAMES := $(ALL_MODULES.$(my_register_name).MODULE_INSTALLED_FILENAMES) $(module_installed_filename)
-INSTALLED_NOTICE_FILES.$(installed_notice_file).MODULE := $(my_register_name)
-else
-$(installed_notice_file): PRIVATE_INSTALLED_MODULE := $(module_installed_filename)
-$(installed_notice_file) : PRIVATE_NOTICES := $(sort $(foreach n,$(notice_file),$(if $(filter %:%,$(n)), $(call word-colon,1,$(n)), $(n))))
-
-$(installed_notice_file): $(foreach n,$(notice_file),$(if $(filter %:%,$(n)), $(call word-colon,1,$(n)), $(n)))
-	@echo Notice file: $< -- $@
-	$(hide) mkdir -p $(dir $@)
-	$(hide) awk 'FNR==1 && NR > 1 {print "\n"} {print}' $(PRIVATE_NOTICES) > $@
-endif
-
-ifdef LOCAL_INSTALLED_MODULE
-# Make LOCAL_INSTALLED_MODULE depend on NOTICE files if they exist
-# libraries so they get installed along with it.  Make it an order-only
-# dependency so we don't re-install a module when the NOTICE changes.
-$(LOCAL_INSTALLED_MODULE): | $(installed_notice_file)
-endif
-
-# To facilitate collecting NOTICE files for apps_only build,
-# we install the NOTICE file even if a module gets built but not installed,
-# because shared jni libraries won't be installed to the system image.
-ifdef TARGET_BUILD_APPS
-# for static Java libraries, we don't need to even build LOCAL_BUILT_MODULE,
-# but just javalib.jar in the common intermediate dir.
-ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
-$(intermediates.COMMON)/javalib.jar : | $(installed_notice_file)
-else
-$(LOCAL_BUILT_MODULE): | $(installed_notice_file)
-endif  # JAVA_LIBRARIES
-endif  # TARGET_BUILD_APPS
-
-endif  # module_installed_filename
 endif  # notice_file
-
-# Create a predictable, phony target to build this notice file.
-# Define it even if the notice file doesn't exist so that other
-# modules can depend on it.
-notice_target := NOTICE-$(if \
-    $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-$(LOCAL_MODULE_CLASS)-$(LOCAL_MODULE)
-.PHONY: $(notice_target)
-$(notice_target): $(installed_notice_file)
diff --git a/core/os_licensing.mk b/core/os_licensing.mk
index d8d3c78..db7c422 100644
--- a/core/os_licensing.mk
+++ b/core/os_licensing.mk
@@ -5,7 +5,7 @@
 
 ifneq (,$(SYSTEM_NOTICE_DEPS))
 
-SYSTEM_NOTICE_DEPS += $(UNMOUNTED_NOTICE_DEPS)
+SYSTEM_NOTICE_DEPS += $(UNMOUNTED_NOTICE_DEPS) $(UNMOUNTED_NOTICE_VENDOR_DEPS)
 
 ifneq ($(PRODUCT_NOTICE_SPLIT),true)
 $(eval $(call html-notice-rule,$(target_notice_file_html_gz),"System image",$(system_notice_file_message),$(SYSTEM_NOTICE_DEPS),$(SYSTEM_NOTICE_DEPS)))
@@ -23,7 +23,6 @@
 
 $(call declare-0p-target,$(target_notice_file_xml_gz))
 $(call declare-0p-target,$(installed_notice_html_or_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_notice_html_or_xml_gz)
 endif
 
 .PHONY: vendorlicense
@@ -31,22 +30,21 @@
 
 ifneq (,$(VENDOR_NOTICE_DEPS))
 
-VENDOR_NOTICE_DEPS += $(UNMOUNTED_NOTICE_DEPS)
+VENDOR_NOTICE_DEPS += $(UNMOUNTED_NOTICE_VENDOR_DEPS)
 
 $(eval $(call text-notice-rule,$(target_vendor_notice_file_txt),"Vendor image", \
          "Notices for files contained in all filesystem images except system/system_ext/product/odm/vendor_dlkm/odm_dlkm in this directory:", \
-         $(VENDOR_NOTICE_DEPS)))
+         $(VENDOR_NOTICE_DEPS),$(VENDOR_NOTICE_DEPS)))
 
 $(eval $(call xml-notice-rule,$(target_vendor_notice_file_xml_gz),"Vendor image", \
          "Notices for files contained in all filesystem images except system/system_ext/product/odm/vendor_dlkm/odm_dlkm in this directory:", \
-         $(VENDOR_NOTICE_DEPS)))
+         $(VENDOR_NOTICE_DEPS),$(VENDOR_NOTICE_DEPS)))
 
 $(installed_vendor_notice_xml_gz): $(target_vendor_notice_file_xml_gz)
 	$(copy-file-to-target)
 
 $(call declare-0p-target,$(target_vendor_notice_file_xml_gz))
 $(call declare-0p-target,$(installed_vendor_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_notice_xml_gz)
 endif
 
 .PHONY: odmlicense
@@ -55,18 +53,17 @@
 ifneq (,$(ODM_NOTICE_DEPS))
 $(eval $(call text-notice-rule,$(target_odm_notice_file_txt),"ODM filesystem image", \
          "Notices for files contained in the odm filesystem image in this directory:", \
-         $(ODM_NOTICE_DEPS)))
+         $(ODM_NOTICE_DEPS),$(ODM_NOTICE_DEPS)))
 
 $(eval $(call xml-notice-rule,$(target_odm_notice_file_xml_gz),"ODM filesystem image", \
          "Notices for files contained in the odm filesystem image in this directory:", \
-         $(ODM_NOTICE_DEPS)))
+         $(ODM_NOTICE_DEPS),$(ODM_NOTICE_DEPS)))
 
 $(installed_odm_notice_xml_gz): $(target_odm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
 $(call declare-0p-target,$(target_odm_notice_file_xml_gz))
 $(call declare-0p-target,$(installed_odm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_notice_xml_gz)
 endif
 
 .PHONY: oemlicense
@@ -78,18 +75,17 @@
 ifneq (,$(PRODUCT_NOTICE_DEPS))
 $(eval $(call text-notice-rule,$(target_product_notice_file_txt),"Product image", \
          "Notices for files contained in the product filesystem image in this directory:", \
-         $(PRODUCT_NOTICE_DEPS)))
+         $(PRODUCT_NOTICE_DEPS),$(PRODUCT_NOTICE_DEPS)))
 
 $(eval $(call xml-notice-rule,$(target_product_notice_file_xml_gz),"Product image", \
          "Notices for files contained in the product filesystem image in this directory:", \
-         $(PRODUCT_NOTICE_DEPS)))
+         $(PRODUCT_NOTICE_DEPS),$(PRODUCT_NOTICE_DEPS)))
 
 $(installed_product_notice_xml_gz): $(target_product_notice_file_xml_gz)
 	$(copy-file-to-target)
 
 $(call declare-0p-target,$(target_product_notice_file_xml_gz))
 $(call declare-0p-target,$(installed_product_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_product_notice_xml_gz)
 endif
 
 .PHONY: systemextlicense
@@ -98,18 +94,17 @@
 ifneq (,$(SYSTEM_EXT_NOTICE_DEPS))
 $(eval $(call text-notice-rule,$(target_system_ext_notice_file_txt),"System_ext image", \
          "Notices for files contained in the system_ext filesystem image in this directory:", \
-         $(SYSTEM_EXT_NOTICE_DEPS)))
+         $(SYSTEM_EXT_NOTICE_DEPS),$(SYSTEM_EXT_NOTICE_DEPS)))
 
 $(eval $(call xml-notice-rule,$(target_system_ext_notice_file_xml_gz),"System_ext image", \
          "Notices for files contained in the system_ext filesystem image in this directory:", \
-         $(SYSTEM_EXT_NOTICE_DEPS)))
+         $(SYSTEM_EXT_NOTICE_DEPS),$(SYSTEM_EXT_NOTICE_DEPS)))
 
 $(installed_system_ext_notice_xml_gz): $(target_system_ext_notice_file_xml_gz)
 	$(copy-file-to-target)
 
 $(call declare-0p-target,$(target_system_ext_notice_file_xml_gz))
 $(call declare-0p-target,$(installed_system_ext_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_ext_notice_xml_gz)
 endif
 
 .PHONY: vendor_dlkmlicense
@@ -118,18 +113,17 @@
 ifneq (,$(VENDOR_DLKM_NOTICE_DEPS))
 $(eval $(call text-notice-rule,$(target_vendor_dlkm_notice_file_txt),"Vendor_dlkm image", \
          "Notices for files contained in the vendor_dlkm filesystem image in this directory:", \
-         $(VENDOR_DLKM_NOTICE_DEPS)))
+         $(VENDOR_DLKM_NOTICE_DEPS),$(VENDOR_DLKM_NOTICE_DEPS)))
 
 $(eval $(call xml-notice-rule,$(target_vendor_dlkm_notice_file_xml_gz),"Vendor_dlkm image", \
          "Notices for files contained in the vendor_dlkm filesystem image in this directory:", \
-         $(VENDOR_DLKM_NOTICE_DEPS)))
+         $(VENDOR_DLKM_NOTICE_DEPS),$(VENDOR_DLKM_NOTICE_DEPS)))
 
 $(installed_vendor_dlkm_notice_xml_gz): $(target_vendor_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
 $(call declare-0p-target,$(target_vendor_dlkm_notice_file_xml_gz))
 $(call declare-0p-target,$(installed_vendor_dlkm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_vendor_dlkm_notice_xml_gz)
 endif
 
 .PHONY: odm_dlkmlicense
@@ -138,18 +132,17 @@
 ifneq (,$(ODM_DLKM_NOTICE_DEPS))
 $(eval $(call text-notice-rule,$(target_odm_dlkm_notice_file_txt),"ODM_dlkm filesystem image", \
          "Notices for files contained in the odm_dlkm filesystem image in this directory:", \
-         $(ODM_DLKM_NOTICE_DEPS)))
+         $(ODM_DLKM_NOTICE_DEPS),$(ODM_DLKM_NOTICE_DEPS)))
 
 $(eval $(call xml-notice-rule,$(target_odm_dlkm_notice_file_xml_gz),"ODM_dlkm filesystem image", \
          "Notices for files contained in the odm_dlkm filesystem image in this directory:", \
-         $(ODM_DLMK_NOTICE_DEPS)))
+         $(ODM_DLKM_NOTICE_DEPS),$(ODM_DLKM_NOTICE_DEPS)))
 
 $(installed_odm_dlkm_notice_xml_gz): $(target_odm_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
 $(call declare-0p-target,$(target_odm_dlkm_notice_file_xml_gz))
 $(call declare-0p-target,$(installed_odm_dlkm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_odm_dlkm_notice_xml_gz)
 endif
 
 .PHONY: system_dlkmlicense
@@ -158,18 +151,17 @@
 ifneq (,$(SYSTEM_DLKM_NOTICE_DEPS))
 $(eval $(call text-notice-rule,$(target_system_dlkm_notice_file_txt),"System_dlkm filesystem image", \
          "Notices for files contained in the system_dlkm filesystem image in this directory:", \
-         $(SYSTEM_DLKM_NOTICE_DEPS)))
+         $(SYSTEM_DLKM_NOTICE_DEPS),$(SYSTEM_DLKM_NOTICE_DEPS)))
 
 $(eval $(call xml-notice-rule,$(target_system_dlkm_notice_file_xml_gz),"System_dlkm filesystem image", \
          "Notices for files contained in the system_dlkm filesystem image in this directory:", \
-         $(SYSTEM_DLMK_NOTICE_DEPS)))
+         $(SYSTEM_DLKM_NOTICE_DEPS),$(SYSTEM_DLKM_NOTICE_DEPS)))
 
 $(installed_system_dlkm_notice_xml_gz): $(target_system_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
 $(call declare-0p-target,$(target_system_dlkm_notice_file_xml_gz))
 $(call declare-0p-target,$(installed_system_dlkm_notice_xml_gz))
-ALL_DEFAULT_INSTALLED_MODULES += $(installed_system_dlkm_notice_xml_gz)
 endif
 
 endif # not TARGET_BUILD_APPS
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 8199ad2..c7a173b 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -481,6 +481,8 @@
 $(LOCAL_BUILT_MODULE): $(LOCAL_CERTIFICATE_LINEAGE)
 $(LOCAL_BUILT_MODULE): PRIVATE_CERTIFICATE_LINEAGE := $(LOCAL_CERTIFICATE_LINEAGE)
 
+$(LOCAL_BUILT_MODULE): PRIVATE_ROTATION_MIN_SDK_VERSION := $(LOCAL_ROTATION_MIN_SDK_VERSION)
+
 # Set an actual_partition_tag (calculated in base_rules.mk) for the package.
 PACKAGES.$(LOCAL_PACKAGE_NAME).PARTITION := $(actual_partition_tag)
 
diff --git a/core/product-graph.mk b/core/product-graph.mk
index 6d51db1..4a44837 100644
--- a/core/product-graph.mk
+++ b/core/product-graph.mk
@@ -15,45 +15,27 @@
 #
 
 # the sort also acts as a strip to remove the single space entries that creep in because of the evals
-define gather-all-products
+define gather-all-makefiles-for-current-product
 $(eval _all_products_visited := )\
-$(sort $(call all-products-inner, $(PARENT_PRODUCT_FILES)))
+$(sort $(call gather-all-makefiles-for-current-product-inner,$(INTERNAL_PRODUCT)))
 endef
 
-define all-products-inner
+define gather-all-makefiles-for-current-product-inner
 	$(foreach p,$(1),\
 		$(if $(filter $(p),$(_all_products_visited)),, \
 			$(p) \
 			$(eval _all_products_visited += $(p)) \
-			$(call all-products-inner, $(PRODUCTS.$(strip $(p)).INHERITS_FROM))
+			$(call gather-all-makefiles-for-current-product-inner, $(PRODUCTS.$(strip $(p)).INHERITS_FROM))
 		) \
 	)
 endef
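+
+# For example (hypothetical makefiles): if a.mk inherits b.mk and b.mk
+# inherits c.mk, the traversal above returns each of a.mk, b.mk and c.mk
+# exactly once, even when a makefile is inherited along multiple paths.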
 
-this_makefile := build/make/core/product-graph.mk
-
-products_graph := $(OUT_DIR)/products.dot
-ifeq ($(strip $(ANDROID_PRODUCT_GRAPH)),)
-products_list := $(INTERNAL_PRODUCT)
-else
-ifeq ($(strip $(ANDROID_PRODUCT_GRAPH)),--all)
-products_list := --all
-else
-products_list := $(foreach prod,$(ANDROID_PRODUCT_GRAPH),$(call resolve-short-product-name,$(prod)))
-endif
-endif
-
-all_products := $(call gather-all-products)
-
-open_parethesis := (
-close_parenthesis := )
-
 node_color_target := orange
 node_color_common := beige
 node_color_vendor := lavenderblush
 node_color_default := white
 define node-color
-$(if $(filter $(1),$(PRIVATE_PRODUCTS_FILTER)),\
+$(if $(filter $(1),$(PRIVATE_TOP_LEVEL_MAKEFILE)),\
   $(node_color_target),\
   $(if $(filter build/make/target/product/%,$(1)),\
     $(node_color_common),\
@@ -62,45 +44,36 @@
 )
 endef
 
+open_parenthesis := (
+close_parenthesis := )
+
 # Emit properties of a product node to a file.
 # $(1) the product
 # $(2) the output file
 define emit-product-node-props
 $(hide) echo \"$(1)\" [ \
-label=\"$(dir $(1))\\n$(notdir $(1))\\n\\n$(subst $(close_parenthesis),,$(subst $(open_parethesis),,$(call get-product-var,$(1),PRODUCT_MODEL)))\\n$(call get-product-var,$(1),PRODUCT_DEVICE)\" \
+label=\"$(dir $(1))\\n$(notdir $(1))$(if $(filter $(1),$(PRIVATE_TOP_LEVEL_MAKEFILE)),$(subst $(open_parethesis),,$(subst $(close_parenthesis),,\\n\\n$(PRODUCT_MODEL)\\n$(PRODUCT_DEVICE))))\" \
 style=\"filled\" fillcolor=\"$(strip $(call node-color,$(1)))\" \
 colorscheme=\"svg\" fontcolor=\"darkblue\" \
 ] >> $(2)
 
 endef
 
-$(products_graph): PRIVATE_PRODUCTS := $(all_products)
-$(products_graph): PRIVATE_PRODUCTS_FILTER := $(products_list)
+products_graph := $(OUT_DIR)/products.dot
 
-$(products_graph): $(this_makefile)
-ifeq (,$(RBC_PRODUCT_CONFIG)$(RBC_NO_PRODUCT_GRAPH)$(RBC_BOARD_CONFIG))
-	@echo Product graph DOT: $@ for $(PRIVATE_PRODUCTS_FILTER)
-	$(hide) echo 'digraph {' > $@.in
-	$(hide) echo 'graph [ ratio=.5 ];' >> $@.in
-	$(hide) $(foreach p,$(PRIVATE_PRODUCTS), \
-	  $(foreach d,$(PRODUCTS.$(strip $(p)).INHERITS_FROM), echo \"$(d)\" -\> \"$(p)\" >> $@.in;))
-	$(foreach p,$(PRIVATE_PRODUCTS),$(call emit-product-node-props,$(p),$@.in))
-	$(hide) echo '}' >> $@.in
-	$(hide) build/make/tools/filter-product-graph.py $(PRIVATE_PRODUCTS_FILTER) < $@.in > $@
-else
-	@echo RBC_PRODUCT_CONFIG and RBC_NO_PRODUCT_GRAPH should be unset to generate product graph
-	false
-endif
-
-ifeq (,$(RBC_PRODUCT_CONFIG)$(RBC_NO_PRODUCT_GRAPH)$(RBC_BOARD_CONFIG))
+$(products_graph): PRIVATE_ALL_MAKEFILES_FOR_THIS_PRODUCT := $(call gather-all-makefiles-for-current-product)
+$(products_graph): PRIVATE_TOP_LEVEL_MAKEFILE := $(INTERNAL_PRODUCT)
+$(products_graph):
+	@echo Product graph DOT: $@ for $(PRIVATE_TOP_LEVEL_MAKEFILE)
+	$(hide) echo 'digraph {' > $@
+	$(hide) echo 'graph [ ratio=.5 ];' >> $@
+	$(hide) $(foreach p,$(PRIVATE_ALL_MAKEFILES_FOR_THIS_PRODUCT), \
+	  $(foreach d,$(PRODUCTS.$(strip $(p)).INHERITS_FROM), echo \"$(d)\" -\> \"$(p)\" >> $@;))
+	$(foreach p,$(PRIVATE_ALL_MAKEFILES_FOR_THIS_PRODUCT),$(call emit-product-node-props,$(p),$@))
+	$(hide) echo '}' >> $@
 
 .PHONY: product-graph
 product-graph: $(products_graph)
 	@echo Product graph .dot file: $(products_graph)
 	@echo Command to convert to pdf: dot -Tpdf -Nshape=box -o $(OUT_DIR)/products.pdf $(products_graph)
 	@echo Command to convert to svg: dot -Tsvg -Nshape=box -o $(OUT_DIR)/products.svg $(products_graph)
-else
-.PHONY: product-graph
-	@echo RBC_PRODUCT_CONFIG and RBC_NO_PRODUCT_GRAPH should be unset to generate product graph
-	false
-endif
diff --git a/core/product.mk b/core/product.mk
index 1f304cd..277fa74 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -136,10 +136,7 @@
 # PRODUCT_BOOT_JARS, so that device-specific jars go after common jars.
 _product_list_vars += PRODUCT_BOOT_JARS_EXTRA
 
-_product_single_value_vars += PRODUCT_SUPPORTS_BOOT_SIGNER
 _product_single_value_vars += PRODUCT_SUPPORTS_VBOOT
-_product_single_value_vars += PRODUCT_SUPPORTS_VERITY
-_product_single_value_vars += PRODUCT_SUPPORTS_VERITY_FEC
 _product_list_vars += PRODUCT_SYSTEM_SERVER_APPS
 # List of system_server classpath jars on the platform.
 _product_list_vars += PRODUCT_SYSTEM_SERVER_JARS
@@ -168,7 +165,6 @@
 _product_list_vars += PRODUCT_LOADED_BY_PRIVILEGED_MODULES
 _product_single_value_vars += PRODUCT_VBOOT_SIGNING_KEY
 _product_single_value_vars += PRODUCT_VBOOT_SIGNING_SUBKEY
-_product_single_value_vars += PRODUCT_VERITY_SIGNING_KEY
 _product_single_value_vars += PRODUCT_SYSTEM_VERITY_PARTITION
 _product_single_value_vars += PRODUCT_VENDOR_VERITY_PARTITION
 _product_single_value_vars += PRODUCT_PRODUCT_VERITY_PARTITION
@@ -360,15 +356,12 @@
 # This option is only meant to be set by compliance GSI targets.
 _product_single_value_vars += PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT
 
-# If set, metadata files for the following artifacts will be generated.
-# - system/framework/*.jar
-# - system/framework/oat/<arch>/*.{oat,vdex,art}
-# - system/etc/boot-image.prof
-# - system/etc/dirty-image-objects
-# One fsverity metadata container file per one input file will be generated in
-# system.img, with a suffix ".fsv_meta". e.g. a container file for
-# "/system/framework/foo.jar" will be "system/framework/foo.jar.fsv_meta".
-_product_single_value_vars += PRODUCT_SYSTEM_FSVERITY_GENERATE_METADATA
+# If set, an fsverity metadata file will be generated for each file in the
+# allowlist, plus a manifest APK per partition. For example,
+# /system/framework/service.jar will come with service.jar.fsv_meta in the same
+# directory; the file information will also be included in
+# /system/etc/security/fsverity/BuildManifest.apk.
+_product_single_value_vars += PRODUCT_FSVERITY_GENERATE_METADATA
 
 # If true, sets the default for MODULE_BUILD_FROM_SOURCE. This overrides
 # BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE but not an explicitly set value.
@@ -377,17 +370,6 @@
 .KATI_READONLY := _product_single_value_vars _product_list_vars
 _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
 
-define dump-product
-$(warning ==== $(1) ====)\
-$(foreach v,$(_product_var_list),\
-$(warning PRODUCTS.$(1).$(v) := $(call get-product-var,$(1),$(v))))\
-$(warning --------)
-endef
-
-define dump-products
-$(foreach p,$(PRODUCTS),$(call dump-product,$(p)))
-endef
-
 #
 # Functions for including product makefiles
 #
@@ -396,12 +378,11 @@
 # $(1): product to inherit
 #
 # To be called from product makefiles, and is later evaluated during the import-nodes
-# call below. It does three things:
+# call below. It does the following:
 #  1. Inherits all of the variables from $1.
 #  2. Records the inheritance in the .INHERITS_FROM variable
-#  3. Records the calling makefile in PARENT_PRODUCT_FILES
 #
-# (2) and (3) can be used together to reconstruct the include hierarchy
+# (2) and the PRODUCTS variable can be used together to reconstruct the include hierarchy
 # See e.g. product-graph.mk for an example of this.
 #
 define inherit-product
@@ -416,8 +397,7 @@
     $(eval current_mk := $(strip $(word 1,$(_include_stack)))) \
     $(eval inherit_var := PRODUCTS.$(current_mk).INHERITS_FROM) \
     $(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
-    $(eval PARENT_PRODUCT_FILES := $(sort $(PARENT_PRODUCT_FILES) $(current_mk))) \
-    $(call dump-inherit,$(strip $(word 1,$(_include_stack))),$(1)) \
+    $(call dump-inherit,$(current_mk),$(1)) \
     $(call dump-config-vals,$(current_mk),inherit))
 endef
 
@@ -466,64 +446,18 @@
 
 
 #
-# Does various consistency checks on all of the known products.
+# Does various consistency checks on the current product.
 # Takes no parameters, so $(call ) is not necessary.
 #
-define check-all-products
+define check-current-product
 $(if ,, \
-  $(eval _cap_names :=) \
-  $(foreach p,$(PRODUCTS), \
-    $(eval pn := $(strip $(PRODUCTS.$(p).PRODUCT_NAME))) \
-    $(if $(pn),,$(error $(p): PRODUCT_NAME must be defined.)) \
-    $(if $(filter $(pn),$(_cap_names)), \
-      $(error $(p): PRODUCT_NAME must be unique; "$(pn)" already used by $(strip \
-          $(foreach \
-            pp,$(PRODUCTS),
-              $(if $(filter $(pn),$(PRODUCTS.$(pp).PRODUCT_NAME)), \
-                $(pp) \
-               ))) \
-       ) \
-     ) \
-    $(eval _cap_names += $(pn)) \
-    $(if $(call is-c-identifier,$(pn)),, \
-      $(error $(p): PRODUCT_NAME must be a valid C identifier, not "$(pn)") \
-     ) \
-    $(eval pb := $(strip $(PRODUCTS.$(p).PRODUCT_BRAND))) \
-    $(if $(pb),,$(error $(p): PRODUCT_BRAND must be defined.)) \
-    $(foreach cf,$(strip $(PRODUCTS.$(p).PRODUCT_COPY_FILES)), \
-      $(if $(filter 2 3,$(words $(subst :,$(space),$(cf)))),, \
-        $(error $(p): malformed COPY_FILE "$(cf)") \
-       ) \
-     ) \
-   ) \
-)
-endef
-
-
-#
-# Returns the product makefile path for the product with the provided name
-#
-# $(1): short product name like "generic"
-#
-define _resolve-short-product-name
-  $(eval pn := $(strip $(1)))
-  $(eval p := \
-      $(foreach p,$(PRODUCTS), \
-          $(if $(filter $(pn),$(PRODUCTS.$(p).PRODUCT_NAME)), \
-            $(p) \
-       )) \
-   )
-  $(eval p := $(sort $(p)))
-  $(if $(filter 1,$(words $(p))), \
-    $(p), \
-    $(if $(filter 0,$(words $(p))), \
-      $(error No matches for product "$(pn)"), \
-      $(error Product "$(pn)" ambiguous: matches $(p)) \
-    ) \
-  )
-endef
-define resolve-short-product-name
-$(strip $(call _resolve-short-product-name,$(1)))
+  $(if $(call is-c-identifier,$(PRODUCT_NAME)),, \
+    $(error $(INTERNAL_PRODUCT): PRODUCT_NAME must be a valid C identifier, not "$(PRODUCT_NAME)")) \
+  $(if $(PRODUCT_BRAND),, \
+    $(error $(INTERNAL_PRODUCT): PRODUCT_BRAND must be defined.)) \
+  $(foreach cf,$(strip $(PRODUCT_COPY_FILES)), \
+    $(if $(filter 2 3,$(words $(subst :,$(space),$(cf)))),, \
+      $(error $(INTERNAL_PRODUCT): malformed COPY_FILE "$(cf)"))))
 endef
 
 # BoardConfig variables that are also inherited in product mks. Should ideally
diff --git a/core/product_config.mk b/core/product_config.mk
index be4aded..e03ae2b 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -208,38 +208,27 @@
 )
 
 # Dedup, extract product names, etc.
-product_paths :=$(sort $(product_paths))
-all_named_products := $(call _first,$(product_paths),:)
-all_product_makefiles := $(call _second,$(product_paths),:)
+product_paths := $(sort $(product_paths))
+all_named_products := $(sort $(call _first,$(product_paths),:))
 current_product_makefile := $(call _second,$(filter $(TARGET_PRODUCT):%,$(product_paths)),:)
 COMMON_LUNCH_CHOICES := $(sort $(common_lunch_choices))
 
-load_all_product_makefiles :=
-ifneq (,$(filter product-graph, $(MAKECMDGOALS)))
-ifeq ($(ANDROID_PRODUCT_GRAPH),--all)
-load_all_product_makefiles := true
-endif
-endif
-ifneq (,$(filter dump-products,$(MAKECMDGOALS)))
-ifeq ($(ANDROID_DUMP_PRODUCTS),all)
-load_all_product_makefiles := true
-endif
-endif
+# Check that there are no duplicate product names
+$(foreach p,$(all_named_products), \
+  $(if $(filter 1,$(words $(filter $(p):%,$(product_paths)))),, \
+    $(error Product name must be unique, "$(p)" used by $(call _second,$(filter $(p):%,$(product_paths)),:))))
 
 ifneq ($(ALLOW_RULES_IN_PRODUCT_CONFIG),)
 _product_config_saved_KATI_ALLOW_RULES := $(.KATI_ALLOW_RULES)
 .KATI_ALLOW_RULES := $(ALLOW_RULES_IN_PRODUCT_CONFIG)
 endif
 
-ifeq ($(load_all_product_makefiles),true)
-# Import all product makefiles.
-$(call import-products, $(all_product_makefiles))
-else
-# Import just the current product.
-$(if $(current_product_makefile),,$(error Can not locate config makefile for product "$(TARGET_PRODUCT)"))
+ifeq (,$(current_product_makefile))
+  $(error Can not locate config makefile for product "$(TARGET_PRODUCT)")
+endif
+
 ifneq (,$(filter $(TARGET_PRODUCT),$(products_using_starlark_config)))
   RBC_PRODUCT_CONFIG := true
-  RBC_BOARD_CONFIG := true
 endif
 
 ifndef RBC_PRODUCT_CONFIG
@@ -257,53 +246,32 @@
     $(error product configuration converter failed: $(.SHELLSTATUS))
   endif
   include $(OUT_DIR)/rbc/rbc_product_config_results.mk
-  PRODUCTS += $(current_product_makefile)
-endif
-endif  # Import all or just the current product makefile
-
-ifndef RBC_PRODUCT_CONFIG
-# Quick check
-$(check-all-products)
 endif
 
-ifeq ($(SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK),)
+# This step was already handled in the RBC product configuration.
+ifeq ($(RBC_PRODUCT_CONFIG)$(SKIP_ARTIFACT_PATH_REQUIREMENT_PRODUCTS_CHECK),)
 # Import all the products that have made artifact path requirements, so that we can verify
-# the artifacts they produce.
-# These are imported after check-all-products because some of them might not be real products.
+# the artifacts they produce. They might be intermediate makefiles instead of real products.
 $(foreach makefile,$(ARTIFACT_PATH_REQUIREMENT_PRODUCTS),\
   $(if $(filter-out $(makefile),$(PRODUCTS)),$(eval $(call import-products,$(makefile))))\
 )
 endif
 
+INTERNAL_PRODUCT := $(current_product_makefile)
+# Strip and assign the PRODUCT_ variables.
+$(call strip-product-vars)
+
+# Quick check
+$(check-current-product)
+
 ifneq ($(ALLOW_RULES_IN_PRODUCT_CONFIG),)
 .KATI_ALLOW_RULES := $(_product_config_saved_KATI_ALLOW_RULES)
 _product_config_saved_KATI_ALLOW_RULES :=
 endif
 
-ifneq ($(filter dump-products, $(MAKECMDGOALS)),)
-$(dump-products)
-endif
-
-ifndef RBC_PRODUCT_CONFIG
-# Convert a short name like "sooner" into the path to the product
-# file defining that product.
-#
-INTERNAL_PRODUCT := $(call resolve-short-product-name, $(TARGET_PRODUCT))
-ifneq ($(current_product_makefile),$(INTERNAL_PRODUCT))
-$(error PRODUCT_NAME inconsistent in $(current_product_makefile) and $(INTERNAL_PRODUCT))
-endif
-
-
 ############################################################################
-# Strip and assign the PRODUCT_ variables.
-$(call strip-product-vars)
-else
-INTERNAL_PRODUCT := $(current_product_makefile)
-endif
 
 current_product_makefile :=
-all_product_makefiles :=
-all_product_configs :=
 
 #############################################################################
 # Quick check and assign default values
@@ -461,7 +429,7 @@
 
 # Show a warning wall of text if non-compliance-GSI products set this option.
 ifdef PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT
-  ifeq (,$(filter gsi_arm gsi_arm64 gsi_x86 gsi_x86_64 gsi_car_arm64 gsi_car_x86_64,$(PRODUCT_NAME)))
+  ifeq (,$(filter gsi_arm gsi_arm64 gsi_x86 gsi_x86_64 gsi_car_arm64 gsi_car_x86_64 gsi_tv_arm gsi_tv_arm64,$(PRODUCT_NAME)))
     $(warning PRODUCT_INSTALL_DEBUG_POLICY_TO_SYSTEM_EXT is set but \
       PRODUCT_NAME ($(PRODUCT_NAME)) doesn't look like a GSI for compliance \
       testing. This is a special configuration for compliance GSI, so do make \
@@ -505,6 +473,9 @@
   ifneq (,$(call math_gt_or_eq,29,$(PRODUCT_SHIPPING_API_LEVEL)))
     PRODUCT_PACKAGES += $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29)
   endif
+  ifneq (,$(call math_gt_or_eq,33,$(PRODUCT_SHIPPING_API_LEVEL)))
+    PRODUCT_PACKAGES += $(PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33)
+  endif
 endif
 
 # If build command defines OVERRIDE_PRODUCT_EXTRA_VNDK_VERSIONS,
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 192e409..7a5e501 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -70,14 +70,7 @@
 
 def _printvars(state):
     """Prints configuration and global variables."""
-    (globals, cfg, globals_base) = state
-    for attr, val in sorted(cfg.items()):
-        __print_attr(attr, val)
-    if _options.print_globals:
-        print()
-        _printglobals(globals, globals_base)
-
-def _printglobals(globals, globals_base):
+    (globals, globals_base) = state
     for attr, val in sorted(globals.items()):
         if attr == _soong_config_namespaces_key:
             __print_attr("SOONG_CONFIG_NAMESPACES", val.keys())
@@ -111,6 +104,11 @@
     seen = {item: 0 for item in value_list}
     return sorted(seen.keys()) if _options.rearrange == "sort" else seen.keys()
 
+def __sort_pcm_names(pcm_names):
+    # We have to add an extension back onto the pcm names when sorting,
+    # or else the sort order could be wrong when one is a prefix of another.
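+    # For example, ["foo", "foo-bar"] sorts to ["foo", "foo-bar"] when compared
+    # as "foo.mk"/"foo-bar.mk", but to ["foo-bar", "foo"] when compared bare.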
+    return [x[:-3] for x in sorted([y + ".mk" for y in pcm_names], reverse=True)]
+
 def _product_configuration(top_pcm_name, top_pcm, input_variables_init):
     """Creates configuration."""
 
@@ -127,25 +125,19 @@
 
     globals, globals_base = _init_globals(input_variables_init)
 
-    config_postfix = []  # Configs in postfix order
-
     # Each PCM is represented by a quadruple of function, config, children names
     # and readiness (that is, the configurations from inherited PCMs have been
     # substituted).
     configs = {top_pcm_name: (top_pcm, None, [], False)}  # All known PCMs
 
-    stash = []  # Configs to push once their descendants are done
-
-    # Stack containing PCMs to be processed. An item in the stack
-    # is a pair of PCMs name and its height in the product inheritance tree.
-    pcm_stack = [(top_pcm_name, 0)]
-    pcm_count = 0
+    # Stack containing PCMs to be processed
+    pcm_stack = [top_pcm_name]
 
     # Run it until pcm_stack is exhausted, but no more than N times
     for n in range(1000):
         if not pcm_stack:
             break
-        (name, height) = pcm_stack.pop()
+        name = pcm_stack.pop()
         pcm, cfg, c, _ = configs[name]
 
         # cfg is set only after PCM has been called, leverage this
@@ -153,50 +145,101 @@
         if cfg != None:
             continue
 
-        # Push ancestors until we reach this node's height
-        config_postfix.extend([stash.pop() for i in range(len(stash) - height)])
-
         # Run this one, obtaining its configuration and child PCMs.
         if _options.trace_modules:
-            print("#%d: %s" % (n, name))
+            rblf_log("%d: %s" % (n, name))
 
         # Run PCM.
         handle = __h_new()
         pcm(globals, handle)
 
+        if handle.artifact_path_requirements:
+            globals["PRODUCTS."+name+".mk.ARTIFACT_PATH_REQUIREMENTS"] = handle.artifact_path_requirements
+            globals["PRODUCTS."+name+".mk.ARTIFACT_PATH_ALLOWED_LIST"] = handle.artifact_path_allowed_list
+            globals["PRODUCTS."+name+".mk.ARTIFACT_PATH_REQUIREMENT_IS_RELAXED"] = "true" if handle.artifact_path_requirement_is_relaxed[0] else ""
+            globals.setdefault("ARTIFACT_PATH_REQUIREMENT_PRODUCTS", [])
+            globals["ARTIFACT_PATH_REQUIREMENT_PRODUCTS"] = sorted(globals["ARTIFACT_PATH_REQUIREMENT_PRODUCTS"] + [name+".mk"])
+
+        if handle.product_enforce_packages_exist[0]:
+            globals["PRODUCTS."+name+".mk.PRODUCT_ENFORCE_PACKAGES_EXIST"] = "true"
+            globals["PRODUCTS."+name+".mk.PRODUCT_ENFORCE_PACKAGES_EXIST_ALLOW_LIST"] = handle.product_enforce_packages_exist_allow_list
+
         # Now we know everything about this PCM, record it in 'configs'.
         children = handle.inherited_modules
         if _options.trace_modules:
-            print("#   ", "    ".join(sorted(children.keys())))
-        configs[name] = (pcm, handle.cfg, sorted(children.keys()), False)
-        pcm_count = pcm_count + 1
+            rblf_log("   ", "    ".join(children.keys()))
+        # Starlark dictionaries are guaranteed to iterate in insertion order,
+        # so children.keys() is ordered by the inherit() calls.
+        configs[name] = (pcm, handle.cfg, children.keys(), False)
 
-        if len(children) == 0:
-            # Leaf PCM goes straight to the config_postfix
-            config_postfix.append(name)
-            continue
-
-        # Stash this PCM, process children in the sorted order
-        stash.append(name)
-        for child_name in sorted(children, reverse = True):
+        for child_name in __sort_pcm_names(children.keys()):
             if child_name not in configs:
                 configs[child_name] = (children[child_name], None, [], False)
-            pcm_stack.append((child_name, len(stash)))
+            pcm_stack.append(child_name)
     if pcm_stack:
         fail("Inheritance processing took too many iterations")
 
-    # Flush the stash
-    config_postfix.extend([stash.pop() for i in range(len(stash))])
-    if len(config_postfix) != pcm_count:
-        fail("Ran %d modules but postfix tree has only %d entries" % (pcm_count, len(config_postfix)))
+    for pcm_name in globals.get("ARTIFACT_PATH_REQUIREMENT_PRODUCTS", []):
+        for var, val in evaluate_finalized_product_variables(configs, pcm_name[:-3]).items():
+            globals["PRODUCTS."+pcm_name+"."+var] = val
 
-    if _options.trace_modules:
-        print("\n#---Postfix---")
-        for x in config_postfix:
-            print("#   ", x)
+    # Copy product config variables from the cfg dictionary to the
+    # PRODUCTS.<top_level_makefile_name>.<var_name> global variables.
+    for var, val in evaluate_finalized_product_variables(configs, top_pcm_name, _options.trace_modules).items():
+        globals["PRODUCTS."+top_pcm_name+".mk."+var] = val
+
+    # Record inheritance hierarchy in PRODUCTS.<file>.INHERITS_FROM variables.
+    # This is required for m product-graph.
+    for config in configs:
+        if len(configs[config][2]) > 0:
+            globals["PRODUCTS."+config+".mk.INHERITS_FROM"] = sorted([x + ".mk" for x in configs[config][2]])
+    globals["PRODUCTS"] = __words(globals.get("PRODUCTS", [])) + [top_pcm_name + ".mk"]
+
+    return (globals, globals_base)
+
+def evaluate_finalized_product_variables(configs, top_level_pcm_name, trace=False):
+    configs_postfix = []
+    pcm_stack = [(top_level_pcm_name, True)]
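+    # Iterative postorder traversal of the inheritance tree: each PCM is first
+    # pushed with before=True to expand its children, then revisited with
+    # before=False and appended, so parents come after all their descendants.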
+    for i in range(1000):
+        if not pcm_stack:
+            break
+
+        pcm_name, before = pcm_stack.pop()
+        if before:
+            pcm_stack.append((pcm_name, False))
+            for child in __sort_pcm_names(configs[pcm_name][2]):
+                pcm_stack.append((child, True))
+        else:
+            configs_postfix.append(pcm_name)
+    if pcm_stack:
+        fail("Inheritance processing took too many iterations")
+
+    # Clone the configs, because evaluating the final cfg dictionary removes
+    # values from the intermediate cfg dictionaries. We need to be able to call
+    # evaluate_finalized_product_variables() multiple times, so we can't mutate
+    # the original configs object.
+    cloned_configs = {}
+    for pcm_name in configs:
+        # skip unneeded pcms
+        if pcm_name not in configs_postfix:
+            continue
+        pcm, cfg, children_names, ready = configs[pcm_name]
+        cloned_cfg = {}
+        for var, val in cfg.items():
+            if type(val) == "list":
+                cloned_cfg[var] = list(val)
+            else:
+                cloned_cfg[var] = val
+        cloned_configs[pcm_name] = (pcm, cloned_cfg, children_names, ready)
+    configs = cloned_configs
+
+    if trace:
+        rblf_log("\n---Postfix---")
+        for x in configs_postfix:
+            rblf_log("   ", x)
 
     # Traverse the tree from the bottom, evaluating inherited values
-    for pcm_name in config_postfix:
+    for pcm_name in configs_postfix:
         pcm, cfg, children_names, ready = configs[pcm_name]
 
         # Should run
@@ -215,9 +258,7 @@
         _substitute_inherited(configs, pcm_name, cfg)
         _percolate_inherited(configs, pcm_name, cfg, children_names)
         configs[pcm_name] = pcm, cfg, children_names, True
-
-    return (globals, configs[top_pcm_name][1], globals_base)
-
+    return configs[top_level_pcm_name][1]
 
 def _dictionary_difference(a, b):
     result = {}
@@ -235,7 +276,14 @@
     input_variables_init(globals_base, h_base)
     input_variables_init(globals, h)
     board_config_init(globals, h)
-    return (globals, _dictionary_difference(h.cfg, h_base.cfg), globals_base)
+
+    # Board configuration files aren't really supposed to change
+    # product configuration variables, but some do. You lose the
+    # inheritance features of the product config variables if you do.
+    for var, value in _dictionary_difference(h.cfg, h_base.cfg).items():
+        globals[var] = value
+
+    return (globals, globals_base)
 
 
 def _substitute_inherited(configs, pcm_name, cfg):
@@ -261,7 +309,7 @@
             old_val = val
             new_val = _value_expand(configs, attr, val)
             if new_val != old_val:
-                print("%s(i): %s=%s (was %s)" % (pcm_name, attr, new_val, old_val))
+                rblf_log("%s(i): %s=%s (was %s)" % (pcm_name, attr, new_val, old_val))
             cfg[attr] = new_val
 
 def _value_expand(configs, attr, values_list):
@@ -291,12 +339,6 @@
         child_cfg = configs[child_name][1]
         for attr, value in child_cfg.items():
             if type(value) != "list":
-                # Single value variables take the first value available from the leftmost
-                # branch of the tree. If we also had "or attr in percolated_attrs" in this
-                # if statement, it would take the value from the rightmost branch.
-                if cfg.get(attr, "") == "":
-                    cfg[attr] = value
-                    percolated_attrs[attr] = True
                 continue
             if attr in percolated_attrs:
                 # We already are percolating this one, just add this list
@@ -306,9 +348,22 @@
                 cfg[attr] = []
                 __move_items(cfg[attr], child_cfg, attr)
 
+    # Single value variables need to be inherited in alphabetical order,
+    # not in the order of the inherit() calls.
+    for child_name in sorted(children_names):
+        child_cfg = configs[child_name][1]
+        for attr, value in child_cfg.items():
+            if type(value) != "list":
+                # Single value variables take the first value available from the leftmost
+                # branch of the tree. If we also had "or attr in percolated_attrs" in this
+                # if statement, it would take the value from the rightmost branch.
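+                # For example, if children a.mk and b.mk both set PRODUCT_MODEL
+                # and it is otherwise unset, the value from a.mk is taken here.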
+                if cfg.get(attr, "") == "":
+                    cfg[attr] = value
+                    percolated_attrs[attr] = True
+
     for attr in _options.trace_variables:
         if attr in percolated_attrs:
-            print("%s: %s^=%s" % (cfg_name, attr, cfg[attr]))
+            rblf_log("%s: %s^=%s" % (cfg_name, attr, cfg[attr]))
 
 def __move_items(to_list, from_cfg, attr):
     value = from_cfg.get(attr, [])
@@ -350,10 +405,26 @@
     """Gets to the value of the variable in the namespace."""
     return g.get(_soong_config_namespaces_key, {}).get(nsname, {}).get(var, None)
 
-
-def _abspath(path):
+def _abspath(paths):
     """Provided for compatibility, to be removed later."""
-    return path
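+    # For example, with cwd == "/src", _abspath("a/../b ./c") returns "/src/b /src/c".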
+    cwd = rblf_shell('pwd')
+    results = []
+    for path in __words(paths):
+        if path[0] != "/":
+            path = cwd + "/" + path
+
+        resultparts = []
+        for part in path.split('/'):
+            if part == "." or part == "":
+                continue
+            elif part == "..":
+                if resultparts:
+                    resultparts.pop()
+            else:
+                resultparts.append(part)
+        results.append("/" + "/".join(resultparts))
+
+    return " ".join(results)
 
 
 def _addprefix(prefix, string_or_list):
@@ -385,7 +456,7 @@
 
 def __words(string_or_list):
     if type(string_or_list) == "list":
-        return string_or_list
+        string_or_list = " ".join(string_or_list)
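+        # Note that this flattens embedded spaces: ["a b", "c"] becomes ["a", "b", "c"].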
     return _mkstrip(string_or_list).split()
 
 # Handle manipulation functions.
@@ -398,7 +469,12 @@
     return struct(
         cfg = dict(),
         inherited_modules = dict(),
-        default_list_value = list()
+        default_list_value = list(),
+        artifact_path_requirements = list(),
+        artifact_path_allowed_list = list(),
+        artifact_path_requirement_is_relaxed = [False], # as a list so that we can reassign it
+        product_enforce_packages_exist = [False],
+        product_enforce_packages_exist_allow_list = [],
     )
 
 def __h_cfg(handle):
@@ -439,10 +515,6 @@
     """Returns basename."""
     return path.rsplit("/",1)[-1]
 
-def __dir(path):
-    """Returns dirname."""
-    return path.rsplit("/",1)[0]
-
 def _board_platform_in(g, string_or_list):
     """Returns true if board is in the list."""
     board = g.get("TARGET_BOARD_PLATFORM","")
@@ -464,14 +536,17 @@
     """If from file exists, returns [from:to] pair."""
     value = path_pair.split(":", 2)
 
+    if value[0].find('*') != -1:
+        fail("copy_if_exists: input file cannot contain *")
+
     # Check that l[0] exists
-    return [":".join(value)] if rblf_file_exists(value[0]) else []
+    return [":".join(value)] if rblf_wildcard(value[0]) else []
 
-def _enforce_product_packages_exist(pkg_string_or_list):
+def _enforce_product_packages_exist(handle, pkg_string_or_list=[]):
     """Makes including non-existent modules in PRODUCT_PACKAGES an error."""
-
-    #TODO(asmundak)
-    pass
+    handle.product_enforce_packages_exist[0] = True
+    handle.product_enforce_packages_exist_allow_list.clear()
+    handle.product_enforce_packages_exist_allow_list.extend(__words(pkg_string_or_list))
 
 def _add_product_dex_preopt_module_config(handle, modules, config):
     """Equivalent to add-product-dex-preopt-module-config from build/make/core/product.mk."""
@@ -480,10 +555,6 @@
     _setdefault(handle, "PRODUCT_DEX_PREOPT_MODULE_CONFIGS")
     handle.cfg["PRODUCT_DEX_PREOPT_MODULE_CONFIGS"] += [m + "=" + config for m in modules]
 
-def _file_wildcard_exists(file_pattern):
-    """Return True if there are files matching given bash pattern."""
-    return len(rblf_wildcard(file_pattern)) > 0
-
 def _find_and_copy(pattern, from_dir, to_dir):
     """Return a copy list for the files matching the pattern."""
     return sorted([("%s/%s:%s/%s" % (from_dir, f, to_dir, f))
@@ -504,10 +575,15 @@
     Return:
         list of words
     """
-    rex = __mk2regex(__words(pattern))
+    patterns = [__mkparse_pattern(x) for x in __words(pattern)]
     res = []
     for w in __words(text):
-        if not _regex_match(rex, w):
+        match = False
+        for p in patterns:
+            if __mkpattern_matches(p, w):
+                match = True
+                break
+        if not match:
             res.append(w)
     return res
 
@@ -519,13 +595,43 @@
          which stands for any sequence of characters.
         text: string or list of words.
     """
-    rex = __mk2regex(__words(pattern))
+    patterns = [__mkparse_pattern(x) for x in __words(pattern)]
     res = []
     for w in __words(text):
-        if _regex_match(rex, w):
-            res.append(w)
+        for p in patterns:
+            if __mkpattern_matches(p, w):
+                res.append(w)
+                break
     return res
 
+def _first_word(input):
+    """Equivalent to the GNU make function $(firstword)."""
+    input = __words(input)
+    if len(input) == 0:
+        return ""
+    return input[0]
+
+def _last_word(input):
+    """Equivalent to the GNU make function $(lastword)."""
+    input = __words(input)
+    l = len(input)
+    if l == 0:
+        return ""
+    return input[l-1]
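+# For example: _first_word("a b c") == "a", _last_word("a b c") == "c".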
+
+def _flatten_2d_list(list):
+    result = []
+    for x in list:
+        result += x
+    return result
+
+def _dir(paths):
+    """Equivalent to the GNU make function $(dir).
+
+    Returns the folder of the file for each path in paths.
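+
+    For example, _dir("a/b/c.txt x/y") returns "a/b x".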
+    """
+    return " ".join([w.rsplit("/",1)[0] for w in __words(paths)])
+
 def _notdir(paths):
     """Equivalent to the GNU make function $(notdir).
 
@@ -533,22 +639,17 @@
     """
     return " ".join([__base(w) for w in __words(paths)])
 
-def __mk2regex(words):
-    """Returns regular expression equivalent to Make pattern."""
+def _require_artifacts_in_path(handle, paths, allowed_paths):
+    """Equivalent to require-artifacts-in-path in Make."""
+    handle.artifact_path_requirements.clear()
+    handle.artifact_path_requirements.extend(__words(paths))
+    handle.artifact_path_allowed_list.clear()
+    handle.artifact_path_allowed_list.extend(__words(allowed_paths))
 
-    # TODO(asmundak): this will mishandle '\%'
-    return "^(" + "|".join([w.replace("%", ".*", 1) for w in words if w]) + ")$"
-
-def _regex_match(regex, w):
-    return rblf_regex(regex, w)
-
-def _require_artifacts_in_path(paths, allowed_paths):
-    """TODO."""
-    pass
-
-def _require_artifacts_in_path_relaxed(paths, allowed_paths):
-    """TODO."""
-    pass
+def _require_artifacts_in_path_relaxed(handle, paths, allowed_paths):
+    """Equivalent to require-artifacts-in-path-relaxed in Make."""
+    _require_artifacts_in_path(handle, paths, allowed_paths)
+    handle.artifact_path_requirement_is_relaxed[0] = True
 
 def _expand_wildcard(pattern):
     """Expands shell wildcard pattern."""
@@ -598,7 +699,11 @@
 
 
 def __mkparse_pattern(pattern):
-    """Parses Make's patsubst pattern."""
+    """Parses Make's patsubst pattern.
+
+    This is equivalent to pattern.split('%', 1), except that it
+    also handles escaped '%' symbols.
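+
+    For example, __mkparse_pattern("lib%.so") returns ["lib", ".so"];
+    an escaped percent, as in "foo\%bar", is not treated as a split point.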
+    """
     in_escape = False
     res = []
     acc = ""
@@ -618,6 +723,21 @@
     res.append(acc)
     return res
 
+def __mkpattern_matches(pattern, word):
+    """Returns if a pattern matches a given word.
+
+    The pattern must be a list of strings of length at most 2.
+    This checks if word is either equal to the pattern or
+    starts/ends with the two parts of the pattern.
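+
+    For example, __mkpattern_matches(["lib", ".so"], "libc.so") is True,
+    while __mkpattern_matches(["lib", ".so"], "lib.o") is False.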
+    """
+    if len(pattern) > 2:
+        fail("Pattern can have at most 2 components")
+    elif len(pattern) == 1:
+        return pattern[0]==word
+    else:
+        return ((len(word) >= len(pattern[0])+len(pattern[1]))
+            and word.startswith(pattern[0])
+            and word.endswith(pattern[1]))
 
 def __mkpatsubst_word(parsed_pattern,parsed_subst, word):
     (before, after) = parsed_pattern
@@ -639,23 +759,39 @@
     $1 in regex terms).
     """
     parsed_pattern = __mkparse_pattern(pattern)
-    words = s if type(s) == "list" else _mkstrip(s).split(" ")
     if len(parsed_pattern) == 1:
-        out_words = [ replacement if x == pattern else x for x in words]
+        out_words = [ replacement if x == pattern else x for x in __words(s)]
     else:
         parsed_replacement = __mkparse_pattern(replacement)
-        out_words = [__mkpatsubst_word(parsed_pattern, parsed_replacement, x) for x in words]
+        out_words = [__mkpatsubst_word(parsed_pattern, parsed_replacement, x) for x in __words(s)]
     return out_words if type(s) == "list" else " ".join(out_words)
 
 
+def _mksort(input):
+    """Emulate Make's sort.
+
+    This is unique from a regular sort in that it also strips
+    the input, and removes duplicate words from the input.
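+
+    For example, _mksort("c  a b a") returns ["a", "b", "c"].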
+    """
+    input = sorted(__words(input))
+    result = []
+    for w in input:
+        if len(result) == 0 or result[-1] != w:
+            result.append(w)
+    return result
+
+
 def _mkstrip(s):
     """Emulates Make's strip.
 
     That is, removes the string's leading and trailing whitespace characters and
     replaces any sequence of whitespace characters with a single space.
     """
-    if type(s) != "string":
-        return s
+    t = type(s)
+    if t == "list":
+        s = " ".join(s)
+    elif t != "string":
+        fail("Argument to mkstrip must be a string or list, got: "+t)
     result = ""
     was_space = False
     for ch in s.strip().elems():
@@ -698,7 +834,6 @@
     """Returns struct containing runtime global settings."""
     settings = dict(
         format = "pretty",
-        print_globals = False,
         rearrange = "",
         trace_modules = False,
         trace_variables = [],
@@ -712,7 +847,8 @@
         elif x == "pretty" or x == "make":
             settings["format"] = x
         elif x == "global":
-            settings["print_globals"] = True
+            # TODO: Remove this, kept for backwards compatibility
+            pass
         elif x != "":
             fail("RBC_OUT: got %s, should be one of: [pretty|make] [sort|unique]" % x)
     for x in getattr(rblf_cli, "RBC_DEBUG", "").split(","):
@@ -740,15 +876,16 @@
     copy_files = _copy_files,
     copy_if_exists = _copy_if_exists,
     cfg = __h_cfg,
-    dir = __dir,
+    dir = _dir,
     enforce_product_packages_exist = _enforce_product_packages_exist,
     expand_wildcard = _expand_wildcard,
-    file_exists = rblf_file_exists,
-    file_wildcard_exists = _file_wildcard_exists,
     filter = _filter,
     filter_out = _filter_out,
     find_and_copy = _find_and_copy,
     findstring = _findstring,
+    first_word = _first_word,
+    last_word = _last_word,
+    flatten_2d_list = _flatten_2d_list,
     inherit = _inherit,
     indirect = _indirect,
     mk2rbc_error = _mk2rbc_error,
@@ -757,11 +894,11 @@
     mkerror = _mkerror,
     mkpatsubst = _mkpatsubst,
     mkwarning = _mkwarning,
+    mksort = _mksort,
     mkstrip = _mkstrip,
     mksubst = _mksubst,
     notdir = _notdir,
     printvars = _printvars,
-    printglobals = _printglobals,
     product_configuration = _product_configuration,
     board_configuration = _board_configuration,
     product_copy_files_by_pattern = _product_copy_files_by_pattern,
diff --git a/core/proguard.flags b/core/proguard.flags
index 185275e..53f63d8 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -9,10 +9,21 @@
 # Add this flag in your package's own configuration if it's needed.
 #-flattenpackagehierarchy
 
-# Keep classes and methods that have the guava @VisibleForTesting annotation
--keep @**.VisibleForTesting class *
--keepclassmembers class * {
-@**.VisibleForTesting *;
+# Keep classes and methods that have @VisibleForTesting annotations, except in
+# intermediate libraries that export those annotations (e.g., androidx, guava).
+# This avoids keeping library-specific test code that isn't actually needed
+# for platform testing.
+# TODO(b/239961360): Migrate away from androidx.annotation.VisibleForTesting
+# and com.google.common.annotations.VisibleForTesting use in platform code.
+-keep @**.VisibleForTesting class !androidx.**,!com.google.common.**,*
+-keepclassmembers class !androidx.**,!com.google.common.**,* {
+    @**.VisibleForTesting *;
+}
+
+# Keep rule for members that are needed solely to keep alive downstream weak
+# references, and could otherwise be removed after tree shaking optimizations.
+-keepclassmembers,allowaccessmodification,allowobfuscation,allowshrinking class * {
+  @com.android.internal.annotations.KeepForWeakReference <fields>;
 }
 
 # Understand the common @Keep annotation from various Android packages:
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index 30c2341..7e7b270 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -2,6 +2,14 @@
 # that isn't explicitly part of the API
 -dontskipnonpubliclibraryclasses -dontskipnonpubliclibraryclassmembers
 
+# Preserve line number information for debugging stack traces.
+-keepattributes SourceFile,LineNumberTable
+
+# Annotations are implemented as attributes, so we have to explicitly keep them.
+# Keep all runtime-visible annotations like RuntimeVisibleParameterAnnotations
+# and RuntimeVisibleTypeAnnotations, as well as associated defaults.
+-keepattributes RuntimeVisible*Annotation*,AnnotationDefault
+
 # For enumeration classes, see http://proguard.sourceforge.net/manual/examples.html#enumerations
 -keepclassmembers enum * {
     public static **[] values();
@@ -48,7 +56,7 @@
 # -keep class * extends android.app.BackupAgent
 
 # Parcelable CREATORs must be kept for Parcelable functionality
--keep class * implements android.os.Parcelable {
+-keepclassmembers class * implements android.os.Parcelable {
   public static final ** CREATOR;
 }
 
@@ -70,9 +78,23 @@
 # has a fallback, but again, don't use Futures.getChecked on Android regardless.
 -dontwarn java.lang.ClassValue
 
+# Ignore missing annotation references for various support libraries.
+# While this is not ideal, it should be relatively safe given that
+# 1) runtime-visible annotations will still be kept, and 2) compile-time
+# annotations are stripped by R8 anyway.
+# Note: The ** prefix is used to accommodate jarjar repackaging.
+# TODO(b/242088131): Remove these exemptions after resolving transitive libs
+# dependencies that are provided to R8.
+-dontwarn **android**.annotation*.**
+-dontwarn **com.google.errorprone.annotations.**
+-dontwarn javax.annotation.**
+-dontwarn org.checkerframework.**
+-dontwarn org.jetbrains.annotations.**
+
 # Less spammy.
 -dontnote
 
 # The lite proto runtime uses reflection to access fields based on the names in
-# the schema, keep all the fields.
--keepclassmembers class * extends com.google.protobuf.MessageLite { <fields>; }
+# the schema, keep all the fields. Wildcard is used to apply the rule to classes
+# that have been renamed with jarjar.
+-keepclassmembers class * extends **.protobuf.MessageLite { <fields>; }
diff --git a/core/rbe.mk b/core/rbe.mk
index 370d4bd..65abde5 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -46,12 +46,6 @@
     cxx_compare := false
   endif
 
-  ifdef RBE_CXX_COMPARE
-    cxx_compare := $(RBE_CXX_COMPARE)
-  else
-    cxx_compare := "false"
-  endif
-
   ifdef RBE_JAVAC_EXEC_STRATEGY
     javac_exec_strategy := $(RBE_JAVAC_EXEC_STRATEGY)
   else
@@ -87,11 +81,11 @@
   endif
 
   ifdef RBE_R8
-    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=$(JAVA))
   endif
 
   ifdef RBE_D8
-    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=out/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=prebuilts/jdk/jdk11/linux-x86/bin/java)
+    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=$(JAVA))
   endif
 
   rbe_dir :=
diff --git a/core/robolectric_test_config_template.xml b/core/robolectric_test_config_template.xml
index 483b957..56d2312 100644
--- a/core/robolectric_test_config_template.xml
+++ b/core/robolectric_test_config_template.xml
@@ -18,7 +18,7 @@
     <option name="test-suite-tag" value="robolectric" />
     <option name="test-suite-tag" value="robolectric-tests" />
 
-    <option name="java-folder" value="prebuilts/jdk/jdk11/linux-x86/" />
+    <option name="java-folder" value="prebuilts/jdk/jdk17/linux-x86/" />
     <option name="exclude-paths" value="java" />
     <option name="use-robolectric-resources" value="true" />
 
@@ -26,5 +26,12 @@
 
     <test class="com.android.tradefed.testtype.IsolatedHostTest" >
         <option name="jar" value="{MODULE}.jar" />
+        <option name="java-flags" value="--add-modules=jdk.compiler"/>
+        <option name="java-flags" value="--add-opens=java.base/java.lang=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-opens=java.base/java.lang.reflect=ALL-UNNAMED"/>
+        <!-- b/238100560 -->
+        <option name="java-flags" value="--add-opens=java.base/jdk.internal.util.random=ALL-UNNAMED"/>
+        <!-- b/251387255 -->
+        <option name="java-flags" value="--add-opens=java.base/java.io=ALL-UNNAMED"/>
     </test>
 </configuration>
diff --git a/core/sdk_font.mk b/core/sdk_font.mk
deleted file mode 100644
index 1742925..0000000
--- a/core/sdk_font.mk
+++ /dev/null
@@ -1,66 +0,0 @@
-###############################################################################
-# Fonts shipped with the SDK need to be renamed for Java to handle them
-# properly. Hence, a special script is used to rename the fonts. We bundle all
-# the fonts that are shipped on a newer non-space-constrained device. However,
-# OpenType fonts used on these devices are not supported by Java. Their
-# replacements are added separately.
-###############################################################################
-
-
-# The script that renames the font.
-sdk_font_rename_script := frameworks/layoutlib/rename_font/build_font_single.py
-
-# Location of the fonttools library that the above script depends on.
-fonttools_lib := external/fonttools/Lib
-
-# A temporary location to store the renamed fonts. atree picks all files in
-# this directory and bundles it with the SDK.
-SDK_FONT_TEMP := $(call intermediates-dir-for,PACKAGING,sdk-fonts,HOST,COMMON)
-
-# The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
-sdk_font_config := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
-sdk_font_config :=  $(addprefix $(SDK_FONT_TEMP)/standard/, $(notdir $(sdk_font_config)))
-
-$(sdk_font_config): $(SDK_FONT_TEMP)/standard/%.xml: \
-			frameworks/base/data/fonts/%.xml
-	$(hide) mkdir -p $(dir $@)
-	$(hide) cp -vf $< $@
-
-# List of fonts on the device that we want to ship. This is all .ttf fonts.
-sdk_fonts_device := $(filter $(TARGET_OUT)/fonts/%.ttf, $(INTERNAL_SYSTEMIMAGE_FILES))
-sdk_fonts_device := $(addprefix $(SDK_FONT_TEMP)/, $(notdir $(sdk_fonts_device)))
-
-# Macro to rename the font.
-sdk_rename_font = PYTHONPATH=$$PYTHONPATH:$(fonttools_lib) $(sdk_font_rename_script) \
-	    $1 $2
-
-# TODO: If the font file is a symlink, reuse the font renamed from the symlink
-# target.
-$(sdk_fonts_device): $(SDK_FONT_TEMP)/%.ttf: $(TARGET_OUT)/fonts/%.ttf \
-			$(sdk_font_rename_script)
-	$(hide) mkdir -p $(dir $@)
-	$(hide) $(call sdk_rename_font,$<,$@)
-
-# List of all dependencies - all fonts and configuration files.
-SDK_FONT_DEPS := $(sdk_fonts_device) $(sdk_font_config)
-
-# Define a macro to create rule for addititional fonts that we want to include
-# in the SDK.
-# $1 Output font name
-# $2 Source font path
-define sdk-extra-font-rule
-fontfullname := $$(SDK_FONT_TEMP)/$1
-ifeq ($$(filter $$(fontfullname),$$(sdk_fonts_device)),)
-SDK_FONT_DEPS += $$(fontfullname)
-$$(fontfullname): $2 $$(sdk_font_rename_script)
-	$$(hide) mkdir -p $$(dir $$@)
-	$$(hide) $$(call sdk_rename_font,$$<,$$@)
-endif
-fontfullname :=
-endef
-
-# These extra fonts are used as a replacement for OpenType fonts.
-$(eval $(call sdk-extra-font-rule,NanumGothic.ttf,external/naver-fonts/NanumGothic.ttf))
-$(eval $(call sdk-extra-font-rule,DroidSansFallback.ttf,frameworks/base/data/fonts/DroidSansFallbackFull.ttf))
-
-sdk-extra-font-rule :=
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 006e1dc..786a755 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -100,18 +100,24 @@
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_DICT
+  my_proguard_dictionary_directory := $(local-proguard-dictionary-directory)
+  my_proguard_dictionary_mapping_directory := $(local-proguard-dictionary-mapping-directory)
   $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
     $(intermediates.COMMON)/proguard_dictionary))
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary))
+  $(eval $(call copy-r8-dictionary-file-with-mapping,\
+    $(LOCAL_SOONG_PROGUARD_DICT),\
+    $(my_proguard_dictionary_directory)/proguard_dictionary,\
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto))
   $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar))
+    $(my_proguard_dictionary_directory)/classes.jar))
   $(call add-dependency,$(LOCAL_BUILT_MODULE),\
     $(intermediates.COMMON)/proguard_dictionary)
   $(call add-dependency,$(LOCAL_BUILT_MODULE),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary)
+    $(my_proguard_dictionary_directory)/proguard_dictionary)
   $(call add-dependency,$(LOCAL_BUILT_MODULE),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar)
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto)
+  $(call add-dependency,$(LOCAL_BUILT_MODULE),\
+    $(my_proguard_dictionary_directory)/classes.jar)
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
@@ -159,12 +165,14 @@
 ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
   my_2nd_arch_prefix :=
   LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH))
+  partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_ARCH))
   include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
 endif
 ifdef TARGET_2ND_ARCH
   ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH)
     my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
     LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH))
+    partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_2ND_ARCH))
     include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
   endif
 endif
@@ -172,6 +180,7 @@
 my_embed_jni :=
 my_prebuilt_jni_libs :=
 my_2nd_arch_prefix :=
+partition_lib_pairs :=
 
 PACKAGES := $(PACKAGES) $(LOCAL_MODULE)
 ifndef LOCAL_CERTIFICATE
diff --git a/core/soong_cc_rust_prebuilt.mk b/core/soong_cc_rust_prebuilt.mk
index ca52374..05b4b6b 100644
--- a/core/soong_cc_rust_prebuilt.mk
+++ b/core/soong_cc_rust_prebuilt.mk
@@ -50,6 +50,28 @@
 # to avoid checkbuilds making an extra copy of every module.
 LOCAL_CHECKED_MODULE := $(LOCAL_PREBUILT_MODULE_FILE)
 
+my_check_same_vndk_variants :=
+same_vndk_variants_stamp :=
+ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
+  ifeq ($(filter hwaddress address, $(SANITIZE_TARGET)),)
+    ifneq ($(CLANG_COVERAGE),true)
+      # Do not compare VNDK variant for special cases e.g. coverage builds.
+      ifneq ($(SKIP_VNDK_VARIANTS_CHECK),true)
+        my_check_same_vndk_variants := true
+        same_vndk_variants_stamp := $(call local-intermediates-dir,,$(LOCAL_2ND_ARCH_VAR_PREFIX))/same_vndk_variants.timestamp
+      endif
+    endif
+  endif
+endif
+
+ifeq ($(my_check_same_vndk_variants),true)
+  # Add the timestamp to the CHECKED list so that `checkbuild` can run it.
+  # Note that because `checkbuild` doesn't check LOCAL_BUILT_MODULE for soong-built
+  # modules, adding the timestamp to LOCAL_BUILT_MODULE isn't enough: it is skipped
+  # when the vendor variant isn't used at all, which may break downstream trees.
+  LOCAL_ADDITIONAL_CHECKED_MODULE := $(same_vndk_variants_stamp)
+endif
+
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
@@ -125,21 +147,7 @@
   endif
 endif
 
-my_check_same_vndk_variants :=
-ifeq ($(LOCAL_CHECK_SAME_VNDK_VARIANTS),true)
-  ifeq ($(filter hwaddress address, $(SANITIZE_TARGET)),)
-    ifneq ($(CLANG_COVERAGE),true)
-        # Do not compare VNDK variant for special cases e.g. coverage builds.
-        ifneq ($(SKIP_VNDK_VARIANTS_CHECK),true)
-            my_check_same_vndk_variants := true
-        endif
-    endif
-  endif
-endif
-
 ifeq ($(my_check_same_vndk_variants),true)
-  same_vndk_variants_stamp := $(intermediates)/same_vndk_variants.timestamp
-
   my_core_register_name := $(subst .vendor,,$(subst .product,,$(my_register_name)))
   my_core_variant_files := $(call module-target-built-files,$(my_core_register_name))
   my_core_shared_lib := $(sort $(filter %.so,$(my_core_variant_files)))
@@ -184,7 +192,7 @@
       # drop /root as /root is mounted as /
       my_unstripped_path := $(patsubst $(TARGET_OUT_UNSTRIPPED)/root/%,$(TARGET_OUT_UNSTRIPPED)/%, $(my_unstripped_path))
       symbolic_output := $(my_unstripped_path)/$(my_installed_module_stem)
-      $(eval $(call copy-one-file,$(LOCAL_SOONG_UNSTRIPPED_BINARY),$(symbolic_output)))
+      $(eval $(call copy-unstripped-elf-file-with-mapping,$(LOCAL_SOONG_UNSTRIPPED_BINARY),$(symbolic_output)))
       $(LOCAL_BUILT_MODULE): | $(symbolic_output)
 
       ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true)
@@ -260,6 +268,9 @@
 installed_static_library_notice_file_targets += \
     $(foreach lib,$(LOCAL_RLIB_LIBRARIES), \
       NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-RLIB_LIBRARIES-$(lib))
+installed_static_library_notice_file_targets += \
+    $(foreach lib,$(LOCAL_PROC_MACRO_LIBRARIES), \
+      NOTICE-$(if $(LOCAL_IS_HOST_MODULE),HOST$(if $(my_host_cross),_CROSS,),TARGET)-PROC_MACRO_LIBRARIES-$(lib))
 
 $(notice_target): | $(installed_static_library_notice_file_targets)
 $(LOCAL_INSTALLED_MODULE): | $(notice_target)
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 916dfbb..b000df6 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -33,6 +33,8 @@
 $(call add_json_str,  Platform_security_patch,           $(PLATFORM_SECURITY_PATCH))
 $(call add_json_str,  Platform_preview_sdk_version,      $(PLATFORM_PREVIEW_SDK_VERSION))
 $(call add_json_str,  Platform_base_os,                  $(PLATFORM_BASE_OS))
+$(call add_json_str,  Platform_version_last_stable,      $(PLATFORM_VERSION_LAST_STABLE))
+$(call add_json_str,  Platform_version_known_codenames,  $(PLATFORM_VERSION_KNOWN_CODENAMES))
 
 $(call add_json_str,  Platform_min_supported_target_sdk_version, $(PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION))
 
@@ -92,6 +94,7 @@
 $(call add_json_list, AAPTPrebuiltDPI,                   $(PRODUCT_AAPT_PREBUILT_DPI))
 
 $(call add_json_str,  DefaultAppCertificate,             $(PRODUCT_DEFAULT_DEV_CERTIFICATE))
+$(call add_json_str,  MainlineSepolicyDevCertificates,   $(MAINLINE_SEPOLICY_DEV_CERTIFICATES))
 
 $(call add_json_str,  AppsDefaultVersionName,            $(APPS_DEFAULT_VERSION_NAME))
 
@@ -169,6 +172,8 @@
 $(call add_json_list, RecoverySnapshotDirsExcluded,      $(RECOVERY_SNAPSHOT_DIRS_EXCLUDED))
 $(call add_json_bool, HostFakeSnapshotEnabled,           $(HOST_FAKE_SNAPSHOT_ENABLE))
 
+$(call add_json_bool, MultitreeUpdateMeta,               $(filter true,$(TARGET_MULTITREE_UPDATE_META)))
+
 $(call add_json_bool, Treble_linker_namespaces,          $(filter true,$(PRODUCT_TREBLE_LINKER_NAMESPACES)))
 $(call add_json_bool, Enforce_vintf_manifest,            $(filter true,$(PRODUCT_ENFORCE_VINTF_MANIFEST)))
 
@@ -204,9 +209,8 @@
 $(call add_json_list, BoardVendorDlkmSepolicyDirs,       $(BOARD_VENDOR_DLKM_SEPOLICY_DIRS))
 $(call add_json_list, BoardOdmDlkmSepolicyDirs,          $(BOARD_ODM_DLKM_SEPOLICY_DIRS))
 $(call add_json_list, BoardSystemDlkmSepolicyDirs,       $(BOARD_SYSTEM_DLKM_SEPOLICY_DIRS))
-# TODO: BOARD_PLAT_* dirs only kept for compatibility reasons. Will be a hard error on API level 31
-$(call add_json_list, SystemExtPublicSepolicyDirs,       $(SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS) $(BOARD_PLAT_PUBLIC_SEPOLICY_DIR))
-$(call add_json_list, SystemExtPrivateSepolicyDirs,      $(SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS) $(BOARD_PLAT_PRIVATE_SEPOLICY_DIR))
+$(call add_json_list, SystemExtPublicSepolicyDirs,       $(SYSTEM_EXT_PUBLIC_SEPOLICY_DIRS))
+$(call add_json_list, SystemExtPrivateSepolicyDirs,      $(SYSTEM_EXT_PRIVATE_SEPOLICY_DIRS))
 $(call add_json_list, BoardSepolicyM4Defs,               $(BOARD_SEPOLICY_M4DEFS))
 $(call add_json_str,  BoardSepolicyVers,                 $(BOARD_SEPOLICY_VERS))
 $(call add_json_str,  SystemExtSepolicyPrebuiltApiDir,   $(BOARD_SYSTEM_EXT_PREBUILT_DIR))
@@ -227,6 +231,8 @@
 $(call add_json_list, PackageNameOverrides,              $(PRODUCT_PACKAGE_NAME_OVERRIDES))
 $(call add_json_list, CertificateOverrides,              $(PRODUCT_CERTIFICATE_OVERRIDES))
 
+$(call add_json_str, ApexGlobalMinSdkVersionOverride,    $(APEX_GLOBAL_MIN_SDK_VERSION_OVERRIDE))
+
 $(call add_json_bool, EnforceSystemCertificate,          $(filter true,$(ENFORCE_SYSTEM_CERTIFICATE)))
 $(call add_json_list, EnforceSystemCertificateAllowList, $(ENFORCE_SYSTEM_CERTIFICATE_ALLOW_LIST))
 
@@ -245,7 +251,7 @@
 $(foreach namespace,$(SOONG_CONFIG_NAMESPACES),\
   $(call add_json_map, $(namespace))\
   $(foreach key,$(SOONG_CONFIG_$(namespace)),\
-    $(call add_json_str,$(key),$(SOONG_CONFIG_$(namespace)_$(key))))\
+    $(call add_json_str,$(key),$(subst ",\",$(SOONG_CONFIG_$(namespace)_$(key)))))\
   $(call end_json_map))
 $(call end_json_map)
 
@@ -269,6 +275,10 @@
 
 $(call add_json_str,  ShippingApiLevel, $(PRODUCT_SHIPPING_API_LEVEL))
 
+$(call add_json_bool, BuildBrokenClangProperty,           $(filter true,$(BUILD_BROKEN_CLANG_PROPERTY)))
+$(call add_json_bool, BuildBrokenClangAsFlags,            $(filter true,$(BUILD_BROKEN_CLANG_ASFLAGS)))
+$(call add_json_bool, BuildBrokenClangCFlags,             $(filter true,$(BUILD_BROKEN_CLANG_CFLAGS)))
+$(call add_json_bool, BuildBrokenDepfile,                 $(filter true,$(BUILD_BROKEN_DEPFILE)))
 $(call add_json_bool, BuildBrokenEnforceSyspropOwner,     $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
 $(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
 $(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
@@ -287,6 +297,8 @@
 
 $(call add_json_bool, GenerateAidlNdkPlatformBackend, $(filter true,$(NEED_AIDL_NDK_PLATFORM_BACKEND)))
 
+$(call add_json_bool, IgnorePrefer32OnDevice, $(filter true,$(IGNORE_PREFER32_ON_DEVICE)))
+
 $(call json_end)
 
 $(file >$(SOONG_VARIABLES).tmp,$(json_contents))
diff --git a/core/soong_droiddoc_prebuilt.mk b/core/soong_droiddoc_prebuilt.mk
index 4dc5d08..ba597c5 100644
--- a/core/soong_droiddoc_prebuilt.mk
+++ b/core/soong_droiddoc_prebuilt.mk
@@ -6,6 +6,7 @@
 
 ifdef LOCAL_DROIDDOC_STUBS_SRCJAR
 $(eval $(call copy-one-file,$(LOCAL_DROIDDOC_STUBS_SRCJAR),$(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar))
+$(eval ALL_TARGETS.$(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar.META_LIC := $(LOCAL_SOONG_LICENSE_METADATA))
 ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar
 
 .PHONY: $(LOCAL_MODULE)
@@ -14,6 +15,7 @@
 
 ifdef LOCAL_DROIDDOC_DOC_ZIP
 $(eval $(call copy-one-file,$(LOCAL_DROIDDOC_DOC_ZIP),$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip))
+$(eval ALL_TARGETS.$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip.META_LIC := $(LOCAL_SOONG_LICENSE_METADATA))
 $(call dist-for-goals,docs,$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip)
 
 .PHONY: $(LOCAL_MODULE) $(LOCAL_MODULE)-docs.zip
@@ -23,12 +25,15 @@
 
 ifdef LOCAL_DROIDDOC_ANNOTATIONS_ZIP
 $(eval $(call copy-one-file,$(LOCAL_DROIDDOC_ANNOTATIONS_ZIP),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_annotations.zip))
+$(eval ALL_TARGETS.$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_annotations.zip.META_LIC := $(LOCAL_SOONG_LICENSE_METADATA))
 endif
 
 ifdef LOCAL_DROIDDOC_API_VERSIONS_XML
 $(eval $(call copy-one-file,$(LOCAL_DROIDDOC_API_VERSIONS_XML),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_generated-api-versions.xml))
+$(eval ALL_TARGETS.$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)_generated-api-versions.xml.META_LIC := $(LOCAL_SOONG_LICENSE_METADATA))
 endif
 
 ifdef LOCAL_DROIDDOC_METADATA_ZIP
 $(eval $(call copy-one-file,$(LOCAL_DROIDDOC_METADATA_ZIP),$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)-metadata.zip))
+$(eval ALL_TARGETS.$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(LOCAL_MODULE)-metadata.zip.META_LIC := $(LOCAL_SOONG_LICENSE_METADATA))
 endif
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index b819cdc..a8f475f 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -62,18 +62,24 @@
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_DICT
+  my_proguard_dictionary_directory := $(local-proguard-dictionary-directory)
+  my_proguard_dictionary_mapping_directory := $(local-proguard-dictionary-mapping-directory)
   $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
     $(intermediates.COMMON)/proguard_dictionary))
-  $(eval $(call copy-one-file,$(LOCAL_SOONG_PROGUARD_DICT),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary))
+  $(eval $(call copy-r8-dictionary-file-with-mapping,\
+    $(LOCAL_SOONG_PROGUARD_DICT),\
+    $(my_proguard_dictionary_directory)/proguard_dictionary,\
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto))
   $(eval $(call copy-one-file,$(LOCAL_SOONG_CLASSES_JAR),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar))
+    $(my_proguard_dictionary_directory)/classes.jar))
   $(call add-dependency,$(common_javalib.jar),\
     $(intermediates.COMMON)/proguard_dictionary)
   $(call add-dependency,$(common_javalib.jar),\
-    $(call local-packaging-dir,proguard_dictionary)/proguard_dictionary)
+    $(my_proguard_dictionary_directory)/proguard_dictionary)
   $(call add-dependency,$(common_javalib.jar),\
-    $(call local-packaging-dir,proguard_dictionary)/classes.jar)
+    $(my_proguard_dictionary_mapping_directory)/proguard_dictionary.textproto)
+  $(call add-dependency,$(common_javalib.jar),\
+    $(my_proguard_dictionary_directory)/classes.jar)
 endif
 
 ifdef LOCAL_SOONG_PROGUARD_USAGE_ZIP
diff --git a/core/sysprop.mk b/core/sysprop.mk
index 61c07ba..b51818a 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -47,10 +47,18 @@
         echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
         echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
     )\
-    $(if $(filter system vendor odm,$(1)),\
-        echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST) " >> $(2);\
-        echo "ro.$(1).product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT)" >> $(2);\
-        echo "ro.$(1).product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+    $(if $(filter true,$(ZYGOTE_FORCE_64)),\
+        $(if $(filter vendor,$(1)),\
+            echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+            echo "ro.$(1).product.cpu.abilist32=" >> $(2);\
+            echo "ro.$(1).product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+        )\
+    ,\
+        $(if $(filter system vendor odm,$(1)),\
+            echo "ro.$(1).product.cpu.abilist=$(TARGET_CPU_ABI_LIST)" >> $(2);\
+            echo "ro.$(1).product.cpu.abilist32=$(TARGET_CPU_ABI_LIST_32_BIT)" >> $(2);\
+            echo "ro.$(1).product.cpu.abilist64=$(TARGET_CPU_ABI_LIST_64_BIT)" >> $(2);\
+        )\
     )\
     echo "ro.$(1).build.date=`$(DATE_FROM_FILE)`" >> $(2);\
     echo "ro.$(1).build.date.utc=`$(DATE_FROM_FILE) +%s`" >> $(2);\
@@ -261,7 +269,6 @@
 	        BUILD_USERNAME="$(BUILD_USERNAME)" \
 	        BUILD_HOSTNAME="$(BUILD_HOSTNAME)" \
 	        BUILD_NUMBER="$(BUILD_NUMBER_FROM_FILE)" \
-	        BOARD_BUILD_SYSTEM_ROOT_IMAGE="$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)" \
 	        BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT="$(BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT)" \
 	        PLATFORM_VERSION="$(PLATFORM_VERSION)" \
 	        PLATFORM_DISPLAY_VERSION="$(PLATFORM_DISPLAY_VERSION)" \
@@ -282,6 +289,7 @@
 	        TARGET_CPU_ABI_LIST_64_BIT="$(TARGET_CPU_ABI_LIST_64_BIT)" \
 	        TARGET_CPU_ABI="$(TARGET_CPU_ABI)" \
 	        TARGET_CPU_ABI2="$(TARGET_CPU_ABI2)" \
+	        ZYGOTE_FORCE_64_BIT="$(ZYGOTE_FORCE_64_BIT)" \
 	        bash $(BUILDINFO_SH) > $@
 
 ifdef TARGET_SYSTEM_PROP
diff --git a/core/tasks/README.dex_preopt_check.md b/core/tasks/README.dex_preopt_check.md
new file mode 100644
index 0000000..b0baa9e
--- /dev/null
+++ b/core/tasks/README.dex_preopt_check.md
@@ -0,0 +1,43 @@
+# `dex_preopt_check`
+
+`dex_preopt_check` is a build-time check to make sure that all system server
+jars are dexpreopted. When the check fails, you will see the following error
+message:
+
+```
+FAILED:
+build/make/core/tasks/dex_preopt_check.mk:13: warning:  Missing compilation artifacts. Dexpreopting is not working for some system server jars
+Offending entries:
+```
+
+Possible causes are:
+
+1.  There is an APEX/SDK mismatch. (E.g., the APEX is built from source while
+    the SDK is built from prebuilt.)
+
+1.  The `systemserverclasspath_fragment` is not added to
+    `systemserverclasspath_fragments` of the corresponding `apex` module, or
+    not added to `exported_systemserverclasspath_fragments` of the
+    corresponding `prebuilt_apex`/`apex_set` module when building from
+    prebuilt. (See the sketch after this list.)
+
+1.  The expected version of the system server java library is not preferred.
+    (E.g., the `java_import` module has `prefer: false` when building from
+    prebuilt.)
+
+1.  Dexpreopting is disabled for the system server java library. This can be due
+    to various reasons including but not limited to:
+
+    - The java library has `dex_preopt: { enabled: false }` in the Android.bp
+      file.
+
+    - The java library is listed in `DEXPREOPT_DISABLED_MODULES` in a Makefile.
+
+    - The java library is missing `installable: true` in the Android.bp
+      file when building from source.
+
+    - Sanitizer is enabled.
+
+1.  `PRODUCT_SYSTEM_SERVER_JARS`, `PRODUCT_APEX_SYSTEM_SERVER_JARS`,
+    `PRODUCT_STANDALONE_SYSTEM_SERVER_JARS`, or
+    `PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS` has an extra entry that is not
+    needed by the product.
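+
+For cause 2, the expected wiring looks roughly like the sketch below; all
+module names here are hypothetical:
+
+```
+systemserverclasspath_fragment {
+    name: "foo-systemserverclasspath-fragment",
+    contents: ["service-foo"],
+    apex_available: ["com.android.foo"],
+}
+
+apex {
+    name: "com.android.foo",
+    systemserverclasspath_fragments: ["foo-systemserverclasspath-fragment"],
+}
+```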
diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk
index b9a349d..2af1ded 100644
--- a/core/tasks/art-host-tests.mk
+++ b/core/tasks/art-host-tests.mk
@@ -42,4 +42,7 @@
 art-host-tests: $(art_host_tests_zip)
 $(call dist-for-goals, art-host-tests, $(art_host_tests_zip))
 
+$(call declare-1p-container,$(art_host_tests_zip),)
+$(call declare-container-license-deps,$(art_host_tests_zip),$(COMPATIBILITY.art-host-tests.FILES) $(my_host_shared_lib_for_art_host_tests),$(PRODUCT_OUT)/:/)
+
 tests: art-host-tests
diff --git a/core/tasks/build_custom_images.mk b/core/tasks/build_custom_images.mk
index c9b07da..680ad11 100644
--- a/core/tasks/build_custom_images.mk
+++ b/core/tasks/build_custom_images.mk
@@ -62,8 +62,6 @@
   CUSTOM_IMAGE_MODULES \
   CUSTOM_IMAGE_COPY_FILES \
   CUSTOM_IMAGE_SELINUX \
-  CUSTOM_IMAGE_SUPPORT_VERITY \
-  CUSTOM_IMAGE_SUPPORT_VERITY_FEC \
   CUSTOM_IMAGE_VERITY_BLOCK_DEVICE \
   CUSTOM_IMAGE_AVB_HASH_ENABLE \
   CUSTOM_IMAGE_AVB_ADD_HASH_FOOTER_ARGS \
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index 3f84668..c8b1183 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -198,13 +198,20 @@
 $(call dist-for-goals, cts-api-coverage, $(cts-combined-coverage-report):cts-combined-coverage-report.html)
 $(call dist-for-goals, cts-api-coverage, $(cts-combined-xml-coverage-report):cts-combined-coverage-report.xml)
 
+ALL_TARGETS.$(cts-test-coverage-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-system-api-coverage-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-system-api-xml-coverage-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-verifier-coverage-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-combined-coverage-report).META_LIC:=$(module_license_metadata)
+ALL_TARGETS.$(cts-combined-xml-coverage-report).META_LIC:=$(module_license_metadata)
+
 # Arguments:
 #  1 - Name of the report printed out on the screen
 #  2 - List of apk files that will be scanned to generate the report
 #  3 - Format of the report
 define generate-coverage-report-cts
 	$(hide) mkdir -p $(dir $@)
-	$(hide) $(PRIVATE_CTS_API_COVERAGE_EXE) -d $(PRIVATE_DEXDEPS_EXE) -a $(PRIVATE_API_XML_DESC) -n $(PRIVATE_NAPI_XML_DESC) -f $(3) -o $@ $(2)
+	$(hide) $(PRIVATE_CTS_API_COVERAGE_EXE) -j 8 -d $(PRIVATE_DEXDEPS_EXE) -a $(PRIVATE_API_XML_DESC) -n $(PRIVATE_NAPI_XML_DESC) -f $(3) -o $@ $(2)
 	@ echo $(1): file://$$(cd $(dir $@); pwd)/$(notdir $@)
 endef
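+# Example use in a coverage rule (report name, apk list, and format are
+# illustrative):
+#   $(call generate-coverage-report-cts,CTS API Coverage Report,$(my_apks),html)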
 
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index 73fad7c..3196f52 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -55,4 +55,7 @@
 device-tests: $(device-tests-zip)
 $(call dist-for-goals, device-tests, $(device-tests-zip) $(device-tests-list-zip) $(device-tests-configs-zip) $(device_tests_host_shared_libs_zip))
 
+$(call declare-1p-container,$(device-tests-zip),)
+$(call declare-container-license-deps,$(device-tests-zip),$(COMPATIBILITY.device-tests.FILES) $(my_host_shared_lib_for_device_tests),$(PRODUCT_OUT)/:/)
+
 tests: device-tests
diff --git a/core/tasks/dex_preopt_check.mk b/core/tasks/dex_preopt_check.mk
index bfa1ec5..5fd60c8 100644
--- a/core/tasks/dex_preopt_check.mk
+++ b/core/tasks/dex_preopt_check.mk
@@ -12,7 +12,8 @@
   ifneq (,$(filter services,$(PRODUCT_PACKAGES)))
     $(call maybe-print-list-and-error,\
       $(filter-out $(ALL_DEFAULT_INSTALLED_MODULES),$(DEXPREOPT_SYSTEMSERVER_ARTIFACTS)),\
-      Missing compilation artifacts. Dexpreopting is not working for some system server jars \
+      Missing compilation artifacts. Dexpreopting is not working for some system server jars. See \
+      https://cs.android.com/android/platform/superproject/+/master:build/make/core/tasks/README.dex_preopt_check.md \
     )
   endif
 endif
diff --git a/core/tasks/find-shareduid-violation.mk b/core/tasks/find-shareduid-violation.mk
index d6885eb..b5feef1 100644
--- a/core/tasks/find-shareduid-violation.mk
+++ b/core/tasks/find-shareduid-violation.mk
@@ -35,4 +35,5 @@
 		--copy_out_system_ext $(TARGET_COPY_OUT_SYSTEM_EXT) \
 		> $@
 
+$(call declare-0p-target,$(shareduid_violation_modules_filename))
 $(call dist-for-goals,droidcore,$(shareduid_violation_modules_filename))
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index a820a28..5726ee2 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -40,6 +40,26 @@
 # Create an artifact to include all shared library files in general-tests.
 general_tests_host_shared_libs_zip := $(PRODUCT_OUT)/general-tests_host-shared-libs.zip
 
+# Copy kernel test modules to testcases directories
+include $(BUILD_SYSTEM)/tasks/tools/vts-kernel-tests.mk
+ltp_copy_pairs := \
+  $(call target-native-copy-pairs,$(kernel_ltp_modules),$(kernel_ltp_host_out))
+kselftest_copy_pairs := \
+  $(call target-native-copy-pairs,$(kernel_kselftest_modules),$(kernel_kselftest_host_out))
+copy_ltp_tests := $(call copy-many-files,$(ltp_copy_pairs))
+copy_kselftest_tests := $(call copy-many-files,$(kselftest_copy_pairs))
+
+# PHONY targets to build and test `vts_kernel_ltp_tests` and
+# `vts_kernel_kselftest_tests` without building the full vts suite.
+.PHONY: vts_kernel_ltp_tests
+vts_kernel_ltp_tests: $(copy_ltp_tests)
+
+.PHONY: vts_kernel_kselftest_tests
+vts_kernel_kselftest_tests: $(copy_kselftest_tests)
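+# For example, `m vts_kernel_ltp_tests` builds and stages only the LTP
+# artifacts under $(HOST_OUT_TESTCASES)/vts_kernel_ltp_tests, without
+# zipping the full general-tests suite.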
+
+$(general_tests_zip) : $(copy_ltp_tests)
+$(general_tests_zip) : $(copy_kselftest_tests)
+$(general_tests_zip) : PRIVATE_KERNEL_LTP_HOST_OUT := $(kernel_ltp_host_out)
+$(general_tests_zip) : PRIVATE_KERNEL_KSELFTEST_HOST_OUT := $(kernel_kselftest_host_out)
 $(general_tests_zip) : PRIVATE_general_tests_list_zip := $(general_tests_list_zip)
 $(general_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(general_tests_list_zip) $(general_tests_configs_zip) $(general_tests_host_shared_libs_zip)
 $(general_tests_zip) : PRIVATE_TOOLS := $(general_tests_tools)
@@ -52,6 +72,8 @@
 	rm -f $@ $(PRIVATE_general_tests_list_zip)
 	mkdir -p $(PRIVATE_INTERMEDIATES_DIR) $(PRIVATE_INTERMEDIATES_DIR)/tools
 	echo $(sort $(COMPATIBILITY.general-tests.FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
+	find $(PRIVATE_KERNEL_LTP_HOST_OUT) >> $(PRIVATE_INTERMEDIATES_DIR)/list
+	find $(PRIVATE_KERNEL_KSELFTEST_HOST_OUT) >> $(PRIVATE_INTERMEDIATES_DIR)/list
 	grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true
 	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
 	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
@@ -78,6 +100,9 @@
 general-tests: $(general_tests_zip)
 $(call dist-for-goals, general-tests, $(general_tests_zip) $(general_tests_list_zip) $(general_tests_configs_zip) $(general_tests_host_shared_libs_zip))
 
+$(call declare-1p-container,$(general_tests_zip),)
+$(call declare-container-license-deps,$(general_tests_zip),$(COMPATIBILITY.general-tests.FILES) $(general_tests_tools) $(my_host_shared_lib_for_general_tests),$(PRODUCT_OUT)/:/)
+
 intermediates_dir :=
 general_tests_tools :=
 general_tests_zip :=
diff --git a/core/tasks/host-unit-tests.mk b/core/tasks/host-unit-tests.mk
index 755b589..ed2f2a6 100644
--- a/core/tasks/host-unit-tests.mk
+++ b/core/tasks/host-unit-tests.mk
@@ -47,4 +47,7 @@
 host-unit-tests: $(host_unit_tests_zip)
 $(call dist-for-goals, host-unit-tests, $(host_unit_tests_zip))
 
+$(call declare-1p-container,$(host_unit_tests_zip),)
+$(call declare-container-license-deps,$(host_unit_tests_zip),$(COMPATIBILITY.host-unit-tests.FILES) $(my_host_shared_lib_for_host_unit_tests),$(PRODUCT_OUT)/:/)
+
 tests: host-unit-tests
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index 8097535..e83d408 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -24,10 +24,14 @@
 			'"classes_jar": [$(foreach w,$(sort $(ALL_MODULES.$(m).CLASSES_JAR)),"$(w)", )], ' \
 			'"test_mainline_modules": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_MAINLINE_MODULES)),"$(w)", )], ' \
 			'"is_unit_test": "$(ALL_MODULES.$(m).IS_UNIT_TEST)", ' \
+			'"test_options_tags": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_OPTIONS_TAGS)),"$(w)", )], ' \
 			'"data": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_DATA)),"$(w)", )], ' \
 			'"runtime_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).LOCAL_RUNTIME_LIBRARIES)),"$(w)", )], ' \
+			'"static_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).LOCAL_STATIC_LIBRARIES)),"$(w)", )], ' \
 			'"data_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).TEST_DATA_BINS)),"$(w)", )], ' \
 			'"supported_variants": [$(foreach w,$(sort $(ALL_MODULES.$(m).SUPPORTED_VARIANTS)),"$(w)", )], ' \
+			'"host_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).HOST_REQUIRED_FROM_TARGET)),"$(w)", )], ' \
+			'"target_dependencies": [$(foreach w,$(sort $(ALL_MODULES.$(m).TARGET_REQUIRED_FROM_HOST)),"$(w)", )], ' \
 			'},\n' \
 	 ) | sed -e 's/, *\]/]/g' -e 's/, *\}/ }/g' -e '$$s/,$$//' >> $@
 	$(hide) echo '}' >> $@
@@ -37,3 +41,9 @@
 
 $(call dist-for-goals, general-tests, $(MODULE_INFO_JSON))
 $(call dist-for-goals, droidcore-unbundled, $(MODULE_INFO_JSON))
+
+# On every build, generate an all_modules.txt file to be used for autocompleting
+# the m command. After timing this using $(shell date +"%s.%3N"), it only adds
+# 0.01 seconds to the internal master build, and will only rerun on builds that
+# rerun kati.
+$(file >$(PRODUCT_OUT)/all_modules.txt,$(subst $(space),$(newline),$(ALL_MODULES)))
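+# Shell helpers read this file directly; e.g. `allmod` in envsetup.sh is now
+# effectively `cat $ANDROID_PRODUCT_OUT/all_modules.txt`.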
diff --git a/target/product/iorap_large_memory_config.mk b/core/tasks/multitree.mk
similarity index 81%
copy from target/product/iorap_large_memory_config.mk
copy to core/tasks/multitree.mk
index 0c6c89a..225477e 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/core/tasks/multitree.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright (C) 2022 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,4 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+.PHONY: update-meta
+update-meta: $(SOONG_MULTITREE_METADATA)
diff --git a/core/tasks/oem_image.mk b/core/tasks/oem_image.mk
index a847b9d..134be01 100644
--- a/core/tasks/oem_image.mk
+++ b/core/tasks/oem_image.mk
@@ -43,4 +43,7 @@
 oem_image : $(INSTALLED_OEMIMAGE_TARGET)
 $(call dist-for-goals, oem_image, $(INSTALLED_OEMIMAGE_TARGET))
 
+$(call declare-1p-container,$(INSTALLED_OEMIMAGE_TARGET),)
+$(call declare-container-license-deps,$(INSTALLED_OEMIMAGE_TARGET),$(INTERNAL_USERIMAGE_DEPS) $(INTERNAL_OEMIMAGE_FILES),$(INSTALLED_OEMIMAGE_TARGET):)
+
 endif  # oem_image in $(MAKECMDGOALS)
diff --git a/core/tasks/owners.mk b/core/tasks/owners.mk
index 6f32aaf..806b8ee 100644
--- a/core/tasks/owners.mk
+++ b/core/tasks/owners.mk
@@ -31,3 +31,5 @@
 owners : $(owners_zip)
 
 $(call dist-for-goals, general-tests, $(owners_zip))
+
+$(call declare-0p-target,$(owners_zip))
diff --git a/core/tasks/test_mapping.mk b/core/tasks/test_mapping.mk
index da64cab..0b0c93c 100644
--- a/core/tasks/test_mapping.mk
+++ b/core/tasks/test_mapping.mk
@@ -36,3 +36,5 @@
 test_mapping : $(test_mappings_zip)
 
 $(call dist-for-goals, dist_files test_mapping,$(test_mappings_zip))
+
+$(call declare-1p-target,$(test_mappings_zip),)
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index f9ae2c1..2626120 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -91,9 +91,6 @@
 $(my_built_custom_image): PRIVATE_COPY_PAIRS := $(my_copy_pairs)
 $(my_built_custom_image): PRIVATE_PICKUP_FILES := $(my_pickup_files)
 $(my_built_custom_image): PRIVATE_SELINUX := $(CUSTOM_IMAGE_SELINUX)
-$(my_built_custom_image): PRIVATE_SUPPORT_VERITY := $(CUSTOM_IMAGE_SUPPORT_VERITY)
-$(my_built_custom_image): PRIVATE_SUPPORT_VERITY_FEC := $(CUSTOM_IMAGE_SUPPORT_VERITY_FEC)
-$(my_built_custom_image): PRIVATE_VERITY_KEY := $(PRODUCT_VERITY_SIGNING_KEY)
 $(my_built_custom_image): PRIVATE_VERITY_BLOCK_DEVICE := $(CUSTOM_IMAGE_VERITY_BLOCK_DEVICE)
 $(my_built_custom_image): PRIVATE_DICT_FILE := $(CUSTOM_IMAGE_DICT_FILE)
 $(my_built_custom_image): PRIVATE_AVB_AVBTOOL := $(AVBTOOL)
@@ -108,9 +105,6 @@
 else ifneq (,$(filter true, $(CUSTOM_IMAGE_AVB_HASH_ENABLE) $(CUSTOM_IMAGE_AVB_HASHTREE_ENABLE)))
   $(error Cannot set both CUSTOM_IMAGE_AVB_HASH_ENABLE and CUSTOM_IMAGE_AVB_HASHTREE_ENABLE to true)
 endif
-ifeq (true,$(CUSTOM_IMAGE_SUPPORT_VERITY_FEC))
-  $(my_built_custom_image): $(FEC)
-endif
 $(my_built_custom_image): $(INTERNAL_USERIMAGES_DEPS) $(my_built_modules) $(my_image_copy_files) $(my_custom_image_modules_dep) \
   $(CUSTOM_IMAGE_DICT_FILE)
 	@echo "Build image $@"
@@ -130,13 +124,6 @@
 	$(hide) echo "partition_size=$(PRIVATE_PARTITION_SIZE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
 	$(hide) echo "ext_mkuserimg=$(notdir $(MKEXTUSERIMG))" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
 	$(if $(PRIVATE_SELINUX),$(hide) echo "selinux_fc=$(SELINUX_FC)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
-	$(if $(PRIVATE_SUPPORT_VERITY),\
-	  $(hide) echo "verity=$(PRIVATE_SUPPORT_VERITY)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
-	    echo "verity_key=$(PRIVATE_VERITY_KEY)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
-	    echo "verity_signer_cmd=$(VERITY_SIGNER)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt;\
-	    echo "verity_block_device=$(PRIVATE_VERITY_BLOCK_DEVICE)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
-	$(if $(PRIVATE_SUPPORT_VERITY_FEC),\
-	  $(hide) echo "verity_fec=$(PRIVATE_SUPPORT_VERITY_FEC)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
 	$(if $(filter eng, $(TARGET_BUILD_VARIANT)),$(hide) echo "verity_disable=true" >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
 	$(hide) echo "avb_avbtool=$(PRIVATE_AVB_AVBTOOL)" >> $(PRIVATE_INTERMEDIATES)/image_info.txt
 	$(if $(PRIVATE_AVB_KEY_PATH),\
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 3b348bd..a5f162a 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -53,9 +53,20 @@
 $(test_suite_jdk): $(SOONG_ZIP)
 	$(SOONG_ZIP) -o $@ -P $(PRIVATE_SUBDIR)/jdk -C $(PRIVATE_JDK_DIR) -D $(PRIVATE_JDK_DIR)
 
-$(call declare-license-metadata,$(test_suite_jdk),SPDX-license-identifier-GPL-2.0-with-classpath-exception,restricted,\
+$(call declare-license-metadata,$(test_suite_jdk),SPDX-license-identifier-GPL-2.0-with-classpath-exception,permissive,\
   $(test_suite_jdk_dir)/legal/java.base/LICENSE,JDK,prebuilts/jdk/$(notdir $(patsubst %/,%,$(dir $(test_suite_jdk_dir)))))
 
+# Copy license metadata
+$(call declare-copy-target-license-metadata,$(out_dir)/$(notdir $(test_suite_jdk)),$(test_suite_jdk))
+$(foreach t,$(test_tools) $(test_suite_prebuilt_tools),\
+  $(eval _dst := $(out_dir)/tools/$(notdir $(t)))\
+  $(if $(strip $(ALL_TARGETS.$(t).META_LIC)),\
+    $(call declare-copy-target-license-metadata,$(_dst),$(t)),\
+    $(warning $(t) has no license metadata)\
+  )\
+)
+test_copied_tools := $(foreach t,$(test_tools) $(test_suite_prebuilt_tools), $(out_dir)/tools/$(notdir $(t))) $(out_dir)/$(notdir $(test_suite_jdk))
+
 
 # Include host shared libraries
 host_shared_libs := $(call copy-many-files, $(COMPATIBILITY.$(test_suite_name).HOST_SHARED_LIBRARY.FILES))
@@ -65,7 +76,7 @@
     $(eval _src := $(call word-colon,1,$(p)))\
     $(eval _dst := $(call word-colon,2,$(p)))\
     $(if $(strip $(ALL_TARGETS.$(_src).META_LIC)),\
-      $(eval ALL_TARGETS.$(_dst).META_LIC := $(ALL_TARGETS.$(_src).META_LIC)),\
+      $(call declare-copy-target-license-metadata,$(_dst),$(_src)),\
       $(warning $(_src) has no license metadata for $(_dst))\
     )\
   )\
@@ -80,6 +91,7 @@
   $(MERGE_ZIPS) \
   $(SOONG_ZIP) \
   $(host_shared_libs) \
+  $(test_suite_extra_deps) \
 
 compatibility_zip_resources := $(out_dir)/tools $(out_dir)/testcases $(out_dir)/lib $(out_dir)/lib64
 
@@ -120,8 +132,10 @@
 	$(SOONG_ZIP) -d -o $(PRIVATE_tests_list_zip) -j -f $(PRIVATE_tests_list)
 	rm -f $(PRIVATE_tests_list)
 
+$(call declare-0p-target,$(compatibility_tests_list_zip),)
+
 $(call declare-1p-container,$(compatibility_zip),)
-$(call declare-container-license-deps,$(compatibility_zip),$(compatibility_zip_deps) $(test_suite_jdk),$(out_dir)/:/)
+$(call declare-container-license-deps,$(compatibility_zip),$(compatibility_zip_deps) $(test_copied_tools), $(out_dir)/:/)
 
 $(eval $(call html-notice-rule,$(test_suite_notice_html),"Test suites","Notices for files contained in the test suites filesystem image:",$(compatibility_zip),$(compatibility_zip)))
 $(eval $(call text-notice-rule,$(test_suite_notice_txt),"Test suites","Notices for files contained in the test suites filesystem image:",$(compatibility_zip),$(compatibility_zip)))
@@ -129,6 +143,9 @@
 $(call declare-0p-target,$(test_suite_notice_html))
 $(call declare-0p-target,$(test_suite_notice_txt))
 
+$(call declare-1p-copy-files,$(test_suite_dynamic_config),)
+$(call declare-1p-copy-files,$(test_suite_prebuilt_tools),)
+
 # Reset all input variables
 test_suite_name :=
 test_suite_tradefed :=
@@ -139,3 +156,4 @@
 test_suite_jdk :=
 test_suite_jdk_dir :=
 host_shared_libs :=
+test_suite_extra_deps :=
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index 20a1694..c41aec5 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -21,11 +21,13 @@
 LOCAL_MODULE := $(my_package_name)
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
 LOCAL_LICENSE_CONDITIONS := notice
+LOCAL_LICENSE_PACKAGE_NAME := Android
+LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
 LOCAL_MODULE_CLASS := PACKAGING
 LOCAL_MODULE_STEM := $(my_package_name).zip
 LOCAL_UNINSTALLABLE_MODULE := true
 include $(BUILD_SYSTEM)/base_rules.mk
-my_staging_dir := $(intermediates)
+my_staging_dir := $(intermediates)/staging
 my_package_zip := $(LOCAL_BUILT_MODULE)
 
 my_built_modules := $(foreach p,$(my_copy_pairs),$(call word-colon,1,$(p)))
@@ -92,17 +94,18 @@
 endif
 
 $(my_package_zip): PRIVATE_COPY_PAIRS := $(my_copy_pairs)
+$(my_package_zip): PRIVATE_STAGING_DIR := $(my_staging_dir)
 $(my_package_zip): PRIVATE_PICKUP_FILES := $(my_pickup_files)
 $(my_package_zip) : $(my_built_modules)
 	@echo "Package $@"
-	@rm -rf $(dir $@) && mkdir -p $(dir $@)
+	@rm -rf $(PRIVATE_STAGING_DIR) && mkdir -p $(PRIVATE_STAGING_DIR)
 	$(foreach p, $(PRIVATE_COPY_PAIRS),\
 	  $(eval pair := $(subst :,$(space),$(p)))\
 	  mkdir -p $(dir $(word 2,$(pair))) && \
 	  cp -Rf $(word 1,$(pair)) $(word 2,$(pair)) && ) true
 	$(hide) $(foreach f, $(PRIVATE_PICKUP_FILES),\
-	  cp -RfL $(f) $(dir $@) && ) true
-	$(hide) cd $(dir $@) && zip -rqX $(notdir $@) *
+	  cp -RfL $(f) $(PRIVATE_STAGING_DIR) && ) true
+	$(hide) cd $(PRIVATE_STAGING_DIR) && zip -rqX ../$(notdir $@) *
 
 my_makefile :=
 my_staging_dir :=
diff --git a/core/tasks/tools/vts-kernel-tests.mk b/core/tasks/tools/vts-kernel-tests.mk
new file mode 100644
index 0000000..bd115c9
--- /dev/null
+++ b/core/tasks/tools/vts-kernel-tests.mk
@@ -0,0 +1,29 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+-include external/linux-kselftest/android/kselftest_test_list.mk
+-include external/ltp/android/ltp_package_list.mk
+
+include $(BUILD_SYSTEM)/tasks/tools/vts_package_utils.mk
+
+# Copy kernel test modules to testcases directories
+kernel_ltp_host_out := $(HOST_OUT_TESTCASES)/vts_kernel_ltp_tests
+kernel_ltp_vts_out := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)/testcases/vts_kernel_ltp_tests
+kernel_ltp_modules := \
+    ltp \
+    $(ltp_packages)
+
+kernel_kselftest_host_out := $(HOST_OUT_TESTCASES)/vts_kernel_kselftest_tests
+kernel_kselftest_vts_out := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)/testcases/vts_kernel_kselftest_tests
+kernel_kselftest_modules := $(kselftest_modules)
diff --git a/core/tasks/tools/vts_package_utils.mk b/core/tasks/tools/vts_package_utils.mk
index 47bf29c..06161f0 100644
--- a/core/tasks/tools/vts_package_utils.mk
+++ b/core/tasks/tools/vts_package_utils.mk
@@ -29,5 +29,6 @@
       $(eval my_copy_dest := $(patsubst data/%,DATA/%,\
                                $(patsubst system/%,DATA/%,\
                                    $(patsubst $(PRODUCT_OUT)/%,%,$(ins)))))\
+      $(call declare-copy-target-license-metadata,$(2)/$(my_copy_dest),$(bui))\
       $(bui):$(2)/$(my_copy_dest))))
 endef
diff --git a/core/tasks/tradefed-tests-list.mk b/core/tasks/tradefed-tests-list.mk
index bcbdfcf..61bf136 100644
--- a/core/tasks/tradefed-tests-list.mk
+++ b/core/tasks/tradefed-tests-list.mk
@@ -35,4 +35,6 @@
 tradefed-tests-list : $(tradefed_tests_list_zip)
 $(call dist-for-goals, tradefed-tests-list, $(tradefed_tests_list_zip))
 
+$(call declare-1p-target,$(tradefed_tests_list_zip),)
+
 tests: tradefed-tests-list
diff --git a/core/tasks/vts-core-tests.mk b/core/tasks/vts-core-tests.mk
index 3c838b5..bd7652b 100644
--- a/core/tasks/vts-core-tests.mk
+++ b/core/tasks/vts-core-tests.mk
@@ -12,37 +12,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
--include external/linux-kselftest/android/kselftest_test_list.mk
--include external/ltp/android/ltp_package_list.mk
-
-include $(BUILD_SYSTEM)/tasks/tools/vts_package_utils.mk
-
 test_suite_name := vts
 test_suite_tradefed := vts-tradefed
 test_suite_readme := test/vts/tools/vts-core-tradefed/README
 
-# Copy kernel test modules to testcases directories
-kernel_test_host_out := $(HOST_OUT_TESTCASES)/vts_kernel_tests
-kernel_test_vts_out := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)/testcases/vts_kernel_tests
-kernel_test_modules := \
-    $(kselftest_modules) \
-    ltp \
-    $(ltp_packages)
+include $(BUILD_SYSTEM)/tasks/tools/vts-kernel-tests.mk
 
-kernel_test_copy_pairs := \
-  $(call target-native-copy-pairs,$(kernel_test_modules),$(kernel_test_vts_out)) \
-  $(call target-native-copy-pairs,$(kernel_test_modules),$(kernel_test_host_out))
+ltp_copy_pairs := \
+  $(call target-native-copy-pairs,$(kernel_ltp_modules),$(kernel_ltp_vts_out))
+kselftest_copy_pairs := \
+  $(call target-native-copy-pairs,$(kernel_kselftest_modules),$(kernel_kselftest_vts_out))
 
-copy_kernel_tests := $(call copy-many-files,$(kernel_test_copy_pairs))
+copy_ltp_tests := $(call copy-many-files,$(ltp_copy_pairs))
+copy_kselftest_tests := $(call copy-many-files,$(kselftest_copy_pairs))
 
-# PHONY target to be used to build and test `vts_kernel_tests` without building full vts
-.PHONY: vts_kernel_tests
-vts_kernel_tests: $(copy_kernel_tests)
+test_suite_extra_deps := $(copy_ltp_tests) $(copy_kselftest_tests)
 
 include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
 
-$(compatibility_zip): $(copy_kernel_tests)
-
 .PHONY: vts
 vts: $(compatibility_zip) $(compatibility_tests_list_zip)
 $(call dist-for-goals, vts, $(compatibility_zip) $(compatibility_tests_list_zip))
diff --git a/core/tasks/with-license.mk b/core/tasks/with-license.mk
index 469ad76..d41e77a 100644
--- a/core/tasks/with-license.mk
+++ b/core/tasks/with-license.mk
@@ -37,6 +37,10 @@
 		RADIO/bootloader.img:bootloader.img RADIO/radio.img:radio.img \
 		IMAGES/*.img:. OTA/android-info.txt:android-info.txt
 endif
+
+$(call declare-1p-container,$(license_image_input_zip),build)
+$(call declare-container-deps,$(license_image_input_zip),$(BUILT_TARGET_FILES_PACKAGE))
+
 with_license_zip := $(PRODUCT_OUT)/$(name).sh
 $(with_license_zip): PRIVATE_NAME := $(name)
 $(with_license_zip): PRIVATE_INPUT_ZIP := $(license_image_input_zip)
@@ -48,3 +52,7 @@
 		$(PRIVATE_INPUT_ZIP) $(PRIVATE_NAME) $(PRIVATE_VENDOR_BLOBS_LICENSE)
 with-license : $(with_license_zip)
 $(call dist-for-goals, with-license, $(with_license_zip))
+
+$(call declare-1p-container,$(with_license_zip),)
+$(call declare-container-license-deps,$(with_license_zip),$(license_image_input_zip),$(with_license_zip):)
+
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 038b9c4..a7d023f 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -40,25 +40,25 @@
   include $(INTERNAL_BUILD_ID_MAKEFILE)
 endif
 
-DEFAULT_PLATFORM_VERSION := TP1A
+DEFAULT_PLATFORM_VERSION := UP1A
 .KATI_READONLY := DEFAULT_PLATFORM_VERSION
-MIN_PLATFORM_VERSION := TP1A
-MAX_PLATFORM_VERSION := TP1A
+MIN_PLATFORM_VERSION := UP1A
+MAX_PLATFORM_VERSION := UP1A
 
 # The last stable version name of the platform that was released.  During
 # development, this stays at that previous version, while the codename indicates
 # further work based on the previous version.
-PLATFORM_VERSION_LAST_STABLE := 12
+PLATFORM_VERSION_LAST_STABLE := 13
 .KATI_READONLY := PLATFORM_VERSION_LAST_STABLE
 
 # These are the current development codenames, if the build is not a final
 # release build.  If this is a final release build, it is simply "REL".
-PLATFORM_VERSION_CODENAME.TP1A := Tiramisu
+PLATFORM_VERSION_CODENAME.UP1A := UpsideDownCake
 
 # This is the user-visible version.  In a final release build it should
 # be empty to use PLATFORM_VERSION as the user-visible version.  For
 # a preview release it can be set to a user-friendly value like `12 Preview 1`
-PLATFORM_DISPLAY_VERSION :=
+PLATFORM_DISPLAY_VERSION := 13
 
 ifndef PLATFORM_SDK_VERSION
   # This is the canonical definition of the SDK version, which defines
@@ -73,20 +73,25 @@
   # When you increment the PLATFORM_SDK_VERSION please ensure you also
   # clear out the following text file of all older PLATFORM_VERSION's:
   # cts/tests/tests/os/assets/platform_versions.txt
-  PLATFORM_SDK_VERSION := 32
+  PLATFORM_SDK_VERSION := 33
 endif
 .KATI_READONLY := PLATFORM_SDK_VERSION
 
 # This is the sdk extension version of this tree.
-PLATFORM_SDK_EXTENSION_VERSION := 1
+PLATFORM_SDK_EXTENSION_VERSION := 3
 .KATI_READONLY := PLATFORM_SDK_EXTENSION_VERSION
 
 # This is the sdk extension version that PLATFORM_SDK_VERSION ships with.
-PLATFORM_BASE_SDK_EXTENSION_VERSION := 1
+PLATFORM_BASE_SDK_EXTENSION_VERSION := $(PLATFORM_SDK_EXTENSION_VERSION)
 .KATI_READONLY := PLATFORM_BASE_SDK_EXTENSION_VERSION
 
-# This is are all known codenames starting from Q.
-PLATFORM_VERSION_KNOWN_CODENAMES := Q R S Sv2 Tiramisu
+# These are all known codenames.
+PLATFORM_VERSION_KNOWN_CODENAMES := \
+Base Base11 Cupcake Donut Eclair Eclair01 EclairMr1 Froyo Gingerbread GingerbreadMr1 \
+Honeycomb HoneycombMr1 HoneycombMr2 IceCreamSandwich IceCreamSandwichMr1 \
+JellyBean JellyBeanMr1 JellyBeanMr2 Kitkat KitkatWatch Lollipop LollipopMr1 M N NMr1 O OMr1 P \
+Q R S Sv2 Tiramisu UpsideDownCake
+
 # Convert from space separated list to comma separated
 PLATFORM_VERSION_KNOWN_CODENAMES := \
   $(call normalize-comma-list,$(PLATFORM_VERSION_KNOWN_CODENAMES))
@@ -98,9 +103,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-    PLATFORM_SECURITY_PATCH := 2022-03-05
+    PLATFORM_SECURITY_PATCH := 2022-10-05
 endif
-.KATI_READONLY := PLATFORM_SECURITY_PATCH
 
 include $(BUILD_SYSTEM)/version_util.mk
-
diff --git a/core/version_util.mk b/core/version_util.mk
index 3a0d4b5..cbfef96 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -56,36 +56,34 @@
 # unreleased API level targetable by this branch, not just those that are valid
 # lunch targets for this branch.
 
+PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
 ifndef PLATFORM_VERSION_CODENAME
-  PLATFORM_VERSION_CODENAME := $(PLATFORM_VERSION_CODENAME.$(TARGET_PLATFORM_VERSION))
-  ifndef PLATFORM_VERSION_CODENAME
-    # PLATFORM_VERSION_CODENAME falls back to TARGET_PLATFORM_VERSION
-    PLATFORM_VERSION_CODENAME := $(TARGET_PLATFORM_VERSION)
-  endif
-
-  # This is all of the *active* development codenames.
-  # This confusing name is needed because
-  # all_codenames has been baked into build.prop for ages.
-  #
-  # Should be either the same as PLATFORM_VERSION_CODENAME or a comma-separated
-  # list of additional codenames after PLATFORM_VERSION_CODENAME.
-  PLATFORM_VERSION_ALL_CODENAMES :=
-
-  # Build a list of all active code names. Avoid duplicates, and stop when we
-  # reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
-  # that is not included in our build).
-  _versions_in_target := \
-    $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
-  $(foreach version,$(_versions_in_target),\
-    $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
-    $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
-      $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
-
-  # And convert from space separated to comma separated.
-  PLATFORM_VERSION_ALL_CODENAMES := \
-    $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
-
+  # PLATFORM_VERSION_CODENAME falls back to TARGET_PLATFORM_VERSION
+  PLATFORM_VERSION_CODENAME := $(TARGET_PLATFORM_VERSION)
 endif
+
+# This is all of the *active* development codenames.
+# This confusing name is needed because
+# all_codenames has been baked into build.prop for ages.
+#
+# Should be either the same as PLATFORM_VERSION_CODENAME or a comma-separated
+# list of additional codenames after PLATFORM_VERSION_CODENAME.
+PLATFORM_VERSION_ALL_CODENAMES :=
+
+# Build a list of all active code names. Avoid duplicates, and stop when we
+# reach a codename that matches PLATFORM_VERSION_CODENAME (anything beyond
+# that is not included in our build).
+_versions_in_target := \
+  $(call find_and_earlier,$(ALL_VERSIONS),$(TARGET_PLATFORM_VERSION))
+$(foreach version,$(_versions_in_target),\
+  $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
+  $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
+    $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
+
+# And convert from space separated to comma separated.
+PLATFORM_VERSION_ALL_CODENAMES := \
+  $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
+
 .KATI_READONLY := \
   PLATFORM_VERSION_CODENAME \
   PLATFORM_VERSION_ALL_CODENAMES
diff --git a/envsetup.sh b/envsetup.sh
index 7331d5e..4c1aeaa 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -10,7 +10,8 @@
               invocations of 'm' etc.
 - tapas:      tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
               Sets up the build environment for building unbundled apps (APKs).
-- banchan:    banchan <module1> [<module2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
+- banchan:    banchan <module1> [<module2> ...] [arm|x86|arm64|x86_64|arm64only|x86_64only] \
+                      [eng|userdebug|user]
               Sets up the build environment for building unbundled modules (APEXes).
 - croot:      Changes directory to the top of the tree, or a subdirectory thereof.
 - m:          Makes from the top of the tree.
@@ -37,6 +38,7 @@
 - godir:      Go to the directory containing a file.
 - allmod:     List all modules.
 - gomod:      Go to the directory containing a module.
+- bmod:       Get the Bazel label of a Soong module if it is converted with bp2build.
 - pathmod:    Get the directory containing a module.
 - outmod:     Gets the location of a module's installed outputs with a certain extension.
 - dirmods:    Gets the modules defined in a given directory.
@@ -204,41 +206,6 @@
     fi
 
     # and in with the new
-    local prebuiltdir=$(getprebuilt)
-    local gccprebuiltdir=$(get_abs_build_var ANDROID_GCC_PREBUILTS)
-
-    # defined in core/config.mk
-    local targetgccversion=$(get_build_var TARGET_GCC_VERSION)
-    local targetgccversion2=$(get_build_var 2ND_TARGET_GCC_VERSION)
-    export TARGET_GCC_VERSION=$targetgccversion
-
-    # The gcc toolchain does not exists for windows/cygwin. In this case, do not reference it.
-    export ANDROID_TOOLCHAIN=
-    export ANDROID_TOOLCHAIN_2ND_ARCH=
-    local ARCH=$(get_build_var TARGET_ARCH)
-    local toolchaindir toolchaindir2=
-    case $ARCH in
-        x86) toolchaindir=x86/x86_64-linux-android-$targetgccversion/bin
-            ;;
-        x86_64) toolchaindir=x86/x86_64-linux-android-$targetgccversion/bin
-            ;;
-        arm) toolchaindir=arm/arm-linux-androideabi-$targetgccversion/bin
-            ;;
-        arm64) toolchaindir=aarch64/aarch64-linux-android-$targetgccversion/bin;
-               toolchaindir2=arm/arm-linux-androideabi-$targetgccversion2/bin
-            ;;
-        *)
-            echo "Can't find toolchain for unknown architecture: $ARCH"
-            toolchaindir=xxxxxxxxx
-            ;;
-    esac
-    if [ -d "$gccprebuiltdir/$toolchaindir" ]; then
-        export ANDROID_TOOLCHAIN=$gccprebuiltdir/$toolchaindir
-    fi
-
-    if [ "$toolchaindir2" -a -d "$gccprebuiltdir/$toolchaindir2" ]; then
-        export ANDROID_TOOLCHAIN_2ND_ARCH=$gccprebuiltdir/$toolchaindir2
-    fi
 
     export ANDROID_DEV_SCRIPTS=$T/development/scripts:$T/prebuilts/devtools/tools
 
@@ -251,8 +218,7 @@
             ;;
     esac
 
-    ANDROID_BUILD_PATHS=$(get_build_var ANDROID_BUILD_PATHS):$ANDROID_TOOLCHAIN
-    ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$ANDROID_TOOLCHAIN_2ND_ARCH
+    ANDROID_BUILD_PATHS=$(get_build_var ANDROID_BUILD_PATHS)
     ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$ANDROID_DEV_SCRIPTS
 
     # Append llvm binutils prebuilts path to ANDROID_BUILD_PATHS.
@@ -287,6 +253,9 @@
     local ATEST_PATH="$T/prebuilts/asuite/atest/$os_arch"
     ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$ACLOUD_PATH:$AIDEGEN_PATH:$ATEST_PATH
 
+    # Build system
+    ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$T/build/bazel/bin
+
     export ANDROID_BUILD_PATHS=$(tr -s : <<<"${ANDROID_BUILD_PATHS}:")
     export PATH=$ANDROID_BUILD_PATHS$PATH
 
@@ -328,22 +297,6 @@
     #export HOST_EXTRACFLAGS="-I "$T/system/kernel_headers/host_include
 }
 
-function bazel()
-{
-    if which bazel &>/dev/null; then
-        >&2 echo "NOTE: bazel() function sourced from Android's envsetup.sh is being used instead of $(which bazel)"
-        >&2 echo
-    fi
-
-    local T="$(gettop)"
-    if [ ! "$T" ]; then
-        >&2 echo "Couldn't locate the top of the Android tree. Try setting TOP. This bazel() function cannot be used outside of the AOSP directory."
-        return
-    fi
-
-    "$T/tools/bazel" "$@"
-}
-
 function printconfig()
 {
     local T=$(gettop)
@@ -360,8 +313,6 @@
     set_sequence_number
 
     export ANDROID_BUILD_TOP=$(gettop)
-    # With this environment variable new GCC can apply colors to warnings/errors
-    export GCC_COLORS='error=01;31:warning=01;35:note=01;36:caret=01;32:locus=01:quote=01'
 }
 
 function set_sequence_number()
@@ -395,16 +346,21 @@
     fi
 
     local completion_files=(
-      system/core/adb/adb.bash
+      packages/modules/adb/adb.bash
       system/core/fastboot/fastboot.bash
       tools/asuite/asuite.sh
+      prebuilts/bazel/common/bazel-complete.bash
     )
     # Completion can be disabled selectively to allow users to use non-standard completion.
     # e.g.
     # ENVSETUP_NO_COMPLETION=adb # -> disable adb completion
     # ENVSETUP_NO_COMPLETION=adb:bit # -> disable adb and bit completion
+    local T=$(gettop)
     for f in ${completion_files[*]}; do
-        if [ -f "$f" ] && should_add_completion "$f"; then
+        f="$T/$f"
+        if [ ! -f "$f" ]; then
+          echo "Warning: completion file $f not found"
+        elif should_add_completion "$f"; then
             . $f
         fi
     done
@@ -415,6 +371,8 @@
     if [ -z "$ZSH_VERSION" ]; then
         # Doesn't work in zsh.
         complete -o nospace -F _croot croot
+        # TODO(b/244559459): Support b autocompletion for zsh
+        complete -F _bazel__complete -o nospace b
     fi
     complete -F _lunch lunch
 
@@ -422,9 +380,74 @@
     complete -F _complete_android_module_names gomod
     complete -F _complete_android_module_names outmod
     complete -F _complete_android_module_names installmod
+    complete -F _complete_android_module_names bmod
     complete -F _complete_android_module_names m
 }
 
+function multitree_lunch_help()
+{
+    echo "usage: lunch PRODUCT-VARIANT" 1>&2
+    echo "    Set up android build environment based on a product short name and variant" 1>&2
+    echo 1>&2
+    echo "lunch COMBO_FILE VARIANT" 1>&2
+    echo "    Set up android build environment based on a specific lunch combo file" 1>&2
+    echo "    and variant." 1>&2
+    echo 1>&2
+    echo "lunch --print [CONFIG]" 1>&2
+    echo "    Print the contents of a configuration.  If CONFIG is supplied, that config" 1>&2
+    echo "    will be flattened and printed.  If CONFIG is not supplied, the currently" 1>&2
+    echo "    selected config will be printed.  Returns 0 on success or nonzero on error." 1>&2
+    echo 1>&2
+    echo "lunch --list" 1>&2
+    echo "    List all possible combo files available in the current tree" 1>&2
+    echo 1>&2
+    echo "lunch --help" 1>&2
+    echo "lunch -h" 1>&2
+    echo "    Prints this message." 1>&2
+}
+
+function multitree_lunch()
+{
+    local code
+    local results
+    # Lunch must be run in the topdir, but this way we get a clear error
+    # message, instead of FileNotFound.
+    local T=$(multitree_gettop)
+    if [ -z "$T" ]; then
+      _multitree_lunch_error
+      return 1
+    fi
+    if echo "$1" | grep -q '^-' ; then
+        # Calls starting with a -- argument are passed directly and the function
+        # returns with the lunch.py exit code.
+        "${T}/orchestrator/build/orchestrator/core/lunch.py" "$@"
+        code=$?
+        if [[ $code -eq 2 ]] ; then
+          echo 1>&2
+          multitree_lunch_help
+          return $code
+        elif [[ $code -ne 0 ]] ; then
+          return $code
+        fi
+    else
+        # All other calls go through the --lunch variant of lunch.py
+        results=($(${T}/orchestrator/build/orchestrator/core/lunch.py --lunch "$@"))
+        code=$?
+        if [[ $code -eq 2 ]] ; then
+          echo 1>&2
+          multitree_lunch_help
+          return $code
+        elif [[ $code -ne 0 ]] ; then
+          return $code
+        fi
+
+        export TARGET_BUILD_COMBO=${results[0]}
+        export TARGET_BUILD_VARIANT=${results[1]}
+    fi
+}
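+# Examples, mirroring multitree_lunch_help above:
+#   multitree_lunch --list            # list combo files in the tree
+#   multitree_lunch PRODUCT-VARIANT   # select a product and variant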
+
 function choosetype()
 {
     echo "Build type choices are:"
@@ -731,6 +754,10 @@
     set_stuff_for_environment
     [[ -n "${ANDROID_QUIET_BUILD:-}" ]] || printconfig
     destroy_build_var_cache
+
+    if [[ -n "${CHECK_MU_CONFIG:-}" ]]; then
+      check_mu_config
+    fi
 }
 
 unset COMMON_LUNCH_CHOICES_CACHE
@@ -758,7 +785,9 @@
     local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|arm64|x86_64)$' | xargs)"
     local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
     local density="$(echo $* | xargs -n 1 echo | \grep -E '^(ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
-    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|arm64|x86_64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi)$' | xargs)"
+    local keys="$(echo $* | xargs -n 1 echo | \grep -E '^(devkeys)$' | xargs)"
+    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|arm64|x86_64|ldpi|mdpi|tvdpi|hdpi|xhdpi|xxhdpi|xxxhdpi|alldpi|devkeys)$' | xargs)"
+
 
     if [ "$showHelp" != "" ]; then
       $(gettop)/build/make/tapasHelp.sh
@@ -777,6 +806,10 @@
         echo "tapas: Error: Multiple densities supplied: $density"
         return
     fi
+    if [ $(echo $keys | wc -w) -gt 1 ]; then
+        echo "tapas: Error: Multiple keys supplied: $keys"
+        return
+    fi
 
     local product=aosp_arm
     case $arch in
@@ -784,6 +817,10 @@
       arm64)  product=aosp_arm64;;
       x86_64) product=aosp_x86_64;;
     esac
+    if [ -n "$keys" ]; then
+        product=${product/aosp_/aosp_${keys}_}
+    fi;
+
     if [ -z "$variant" ]; then
         variant=eng
     fi
@@ -811,7 +848,7 @@
 function banchan()
 {
     local showHelp="$(echo $* | xargs -n 1 echo | \grep -E '^(help)$' | xargs)"
-    local product="$(echo $* | xargs -n 1 echo | \grep -E '^(.*_)?(arm|x86|arm64|x86_64)$' | xargs)"
+    local product="$(echo $* | xargs -n 1 echo | \grep -E '^(.*_)?(arm|x86|arm64|x86_64|arm64only|x86_64only)$' | xargs)"
     local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
-    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|(.*_)?(arm|x86|arm64|x86_64))$' | xargs)"
+    local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|(.*_)?(arm|x86|arm64|x86_64|arm64only|x86_64only))$' | xargs)"
 
@@ -821,7 +858,7 @@
     fi
 
     if [ -z "$product" ]; then
-        product=arm
+        product=arm64
     elif [ $(echo $product | wc -w) -gt 1 ]; then
         echo "banchan: Error: Multiple build archs or products supplied: $products"
         return
@@ -840,6 +877,8 @@
       x86)    product=module_x86;;
       arm64)  product=module_arm64;;
       x86_64) product=module_x86_64;;
+      arm64only)  product=module_arm64only;;
+      x86_64only) product=module_x86_64only;;
     esac
     if [ -z "$variant" ]; then
         variant=eng
@@ -887,6 +926,34 @@
     fi
 }
 
+# TODO: Merge into gettop as part of launching multitree
+function multitree_gettop
+{
+    local TOPFILE=orchestrator/build/make/core/envsetup.mk
+    if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then
+        # The following circumlocution ensures we remove symlinks from TOP.
+        (cd "$TOP"; PWD= /bin/pwd)
+    else
+        if [ -f $TOPFILE ] ; then
+            # The following circumlocution (repeated below as well) ensures
+            # that we record the true directory name and not one that is
+            # faked up with symlink names.
+            PWD= /bin/pwd
+        else
+            local HERE=$PWD
+            local T=
+            while [ \( ! \( -f $TOPFILE \) \) -a \( "$PWD" != "/" \) ]; do
+                \cd ..
+                T=`PWD= /bin/pwd -P`
+            done
+            \cd "$HERE"
+            if [ -f "$T/$TOPFILE" ]; then
+                echo "$T"
+            fi
+        fi
+    fi
+}
+
 function croot()
 {
     local T=$(gettop)
@@ -959,7 +1026,7 @@
 # Easy way to make system.img/etc writable
 function syswrite() {
   adb wait-for-device && adb root || return 1
-  if [[ $(adb disable-verity | grep "reboot") ]]; then
+  if [[ $(adb disable-verity | grep -i "reboot") ]]; then
       echo "rebooting"
       adb reboot && adb wait-for-device && adb root || return 1
   fi
@@ -1010,7 +1077,7 @@
         return;
     fi;
     echo "Setting core limit for $PID to infinite...";
-    adb shell /system/bin/ulimit -p $PID -c unlimited
+    adb shell /system/bin/ulimit -P $PID -c unlimited
 }
 
 # core - send SIGSEGV and pull the core for process
@@ -1484,14 +1551,49 @@
     fi
 }
 
-# List all modules for the current device, as cached in module-info.json. If any build change is
-# made and it should be reflected in the output, you should run 'refreshmod' first.
+# List all modules for the current device, as cached in all_modules.txt. If any build change is
+# made and it should be reflected in the output, you should run `m nothing` first.
 function allmod() {
-    verifymodinfo || return 1
-
-    python3 -c "import json; print('\n'.join(sorted(json.load(open('$ANDROID_PRODUCT_OUT/module-info.json')).keys())))"
+    cat $ANDROID_PRODUCT_OUT/all_modules.txt 2>/dev/null
 }
 
+# Return the Bazel label of a Soong module if it is converted with bp2build.
+function bmod()
+(
+    if [ $# -ne 1 ]; then
+        echo "usage: bmod <module>" >&2
+        return 1
+    fi
+
+    # We could run bp2build here, but it might trigger bp2build invalidation
+    # when used with `b` (e.g. --run_soong_tests) and/or add unnecessary waiting
+    # time overhead.
+    #
+    # For a snappy result, use the latest generated version in soong_injection,
+    # and ask users to run m bp2build if it doesn't exist.
+    converted_json="$(get_abs_build_var OUT_DIR)/soong/soong_injection/metrics/converted_modules_path_map.json"
+
+    if [ ! -f "${converted_json}" ]; then
+      echo "bp2build files not found. Have you run 'm bp2build'?" >&2
+      return 1
+    fi
+
+    local target_label=$(python3 -c "import json
+module = '$1'
+converted_json='$converted_json'
+bp2build_converted_map = json.load(open(converted_json))
+if module not in bp2build_converted_map:
+    exit(1)
+print(bp2build_converted_map[module] + ':' + module)")
+
+    if [ -z "${target_label}" ]; then
+      echo "$1 is not converted to Bazel." >&2
+      return 1
+    else
+      echo "${target_label}"
+    fi
+)
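+# Example: pass the label straight to b, e.g. `b build "$(bmod libfoo)"`,
+# where libfoo stands for any module already converted with bp2build.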
+
 # Get the path of a specific module in the android tree, as cached in module-info.json.
 # If any build change is made, and it should be reflected in the output, you should run
 # 'refreshmod' first.  Note: This is the inverse of dirmods.
@@ -1631,7 +1733,7 @@
 
 function _complete_android_module_names() {
     local word=${COMP_WORDS[COMP_CWORD]}
-    COMPREPLY=( $(QUIET_VERIFYMODINFO=true allmod | grep -E "^$word") )
+    COMPREPLY=( $(allmod | grep -E "^$word") )
 }
 
 # Print colored exit condition
@@ -1716,7 +1818,8 @@
 function _trigger_build()
 (
     local -r bc="$1"; shift
-    if T="$(gettop)"; then
+    local T=$(gettop)
+    if [ -n "$T" ]; then
       _wrap_build "$T/build/soong/soong_ui.bash" --build-mode --${bc} --dir="$(pwd)" "$@"
     else
       >&2 echo "Couldn't locate the top of the tree. Try setting TOP."
@@ -1724,23 +1827,6 @@
     fi
 )
 
-# Convenience entry point (like m) to use Bazel in AOSP.
-function b()
-(
-    local skip_tests=$(echo "$@" | grep -ow -- "--skip-soong-tests")
-    local bazel_args=(${@/--skip-soong-tests/})
-    # Generate BUILD, bzl files into the synthetic Bazel workspace (out/soong/workspace).
-    _trigger_build "all-modules" bp2build USE_BAZEL_ANALYSIS= "$skip_tests" || return 1
-    # Then, run Bazel using the synthetic workspace as the --package_path.
-    if [[ -z "$bazel_args" ]]; then
-        # If there are no args, show help.
-        bazel help
-    else
-        # Else, always run with the bp2build configuration, which sets Bazel's package path to the synthetic workspace.
-        bazel $bazel_args --config=bp2build
-    fi
-)
-
 function m()
 (
     _trigger_build "all-modules" "$@"
@@ -1771,6 +1857,22 @@
     _wrap_build $(get_make_command "$@") "$@"
 }
 
+function _multitree_lunch_error()
+{
+      >&2 echo "Couldn't locate the top of the tree. Please run 'source build/envsetup.sh' and multitree_lunch from the root of your workspace."
+}
+
+function multitree_build()
+{
+    local T=$(multitree_gettop)
+    if [ -n "$T" ]; then
+      "$T/orchestrator/build/orchestrator/core/orchestrator.py" "$@"
+    else
+      _multitree_lunch_error
+      return 1
+    fi
+}
+
 function provision()
 {
     if [ ! "$ANDROID_PRODUCT_OUT" ]; then
@@ -1876,13 +1978,7 @@
             return
             ;;
     esac
-    if [[ -z "$OUT_DIR" ]]; then
-      if [[ -z "$OUT_DIR_COMMON_BASE" ]]; then
-        OUT_DIR=out
-      else
-        OUT_DIR=${OUT_DIR_COMMON_BASE}/${PWD##*/}
-      fi
-    fi
+    OUT_DIR="$(get_abs_build_var OUT_DIR)"
     if [[ "$1" == "--regenerate" ]]; then
       shift 1
       NINJA_ARGS="-t commands $@" m
@@ -1893,6 +1989,13 @@
     fi
 }
 
+function avbtool() {
+    if [[ ! -f "$ANDROID_SOONG_HOST_OUT"/bin/avbtool ]]; then
+        m avbtool
+    fi
+    "$ANDROID_SOONG_HOST_OUT"/bin/avbtool $@
+}
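+# Example: inspect the AVB metadata of a built image (path illustrative):
+#   avbtool info_image --image "$ANDROID_PRODUCT_OUT"/vbmeta.img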
+
 validate_current_shell
 source_vendorsetup
 addcompletions
diff --git a/finalize-aidl-vndk-sdk-resources.sh b/finalize-aidl-vndk-sdk-resources.sh
new file mode 100755
index 0000000..8e12c49
--- /dev/null
+++ b/finalize-aidl-vndk-sdk-resources.sh
@@ -0,0 +1,50 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_aidl_vndk_sdk_resources() {
+    local top="$(dirname "$0")"/../..
+
+    # default target to modify tree and build SDK
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # This script is WIP and only finalizes part of the Android branch for release.
+    # The full process can be found at (INTERNAL) go/android-sdk-finalization.
+
+    # VNDK snapshot (TODO)
+    # SDK snapshots (TODO)
+    # Update references in the codebase to new API version (TODO)
+    # ...
+
+    AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api create_reference_dumps
+
+    # Generate ABI dumps
+    ANDROID_BUILD_TOP="$top" \
+        out/host/linux-x86/bin/create_reference_dumps \
+        -p aosp_arm64 --build-variant user
+
+    echo "NOTE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF (until 'DONE')"
+    # Update new versions of files. See update-vndk-list.sh (which requires envsetup.sh)
+    $m check-vndk-list || \
+        { cp $top/out/soong/vndk/vndk.libraries.txt $top/build/make/target/product/gsi/current.txt; }
+    echo "DONE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF"
+
+    # Finalize resources
+    "$top/frameworks/base/tools/aapt2/tools/finalize_res.py" \
+           "$top/frameworks/base/core/res/res/values/public-staging.xml" \
+           "$top/frameworks/base/core/res/res/values/public-final.xml"
+
+    # SDK finalization
+    local sdk_codename='public static final int UPSIDE_DOWN_CAKE = CUR_DEVELOPMENT;'
+    local sdk_version='public static final int UPSIDE_DOWN_CAKE = 34;'
+    local sdk_build="$top/frameworks/base/core/java/android/os/Build.java"
+
+    sed -i "s%$sdk_codename%$sdk_version%g" $sdk_build
+
+    # Force update current.txt
+    $m clobber
+    $m update-api
+}
+
+finalize_aidl_vndk_sdk_resources
+
diff --git a/finalize-cleanup.sh b/finalize-cleanup.sh
new file mode 100755
index 0000000..e3bca63
--- /dev/null
+++ b/finalize-cleanup.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+# Brings local repository to a remote head state.
+
+# set -ex
+
+function finalize_revert_local_changes_main() {
+    local top="$(dirname "$0")"/../..
+
+    repo selfupdate
+
+    repo forall -c '\
+        git checkout . ; git revert --abort ; git clean -fdx ;\
+        git checkout @ ; git branch -D fina-step1 ; git reset --hard; \
+        repo start fina-step1 ; git checkout @ ; git branch -D fina-step1 ;'
+}
+
+finalize_revert_local_changes_main
diff --git a/finalize-step-1.sh b/finalize-step-1.sh
new file mode 100755
index 0000000..373dd28
--- /dev/null
+++ b/finalize-step-1.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Automation for finalize_branch_for_release.sh.
+# Sets up local environment, runs the finalization script and submits the results.
+# WIP:
+# - does not submit, only sends to gerrit.
+
+# set -ex
+
+function revert_to_unfinalized_state() {
+    repo forall -c '\
+        git checkout . ; git revert --abort ; git clean -fdx ;\
+        git checkout @ ; git branch -D fina-step1 ; git reset --hard; \
+        repo start fina-step1 ; git checkout @ ; git branch -D fina-step1 ;\
+        previousHash="$(git log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_1_SCRIPT_COMMIT | tr \n \040)" ;\
+        if [[ $previousHash ]]; then git revert --no-commit --strategy=ort --strategy-option=ours $previousHash ; fi ;'
+}
+
+function commit_changes() {
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            repo start fina-step1 ;
+            git add -A . ;
+            git commit -m FINALIZATION_STEP_1_SCRIPT_COMMIT -m WILL_BE_AUTOMATICALLY_REVERTED ;
+            repo upload --cbr --no-verify -t -y . ;
+            git clean -fdx ; git reset --hard ;
+        fi'
+}
+
+function finalize_step_1_main() {
+    local top="$(dirname "$0")"/../..
+
+    repo selfupdate
+
+    revert_to_unfinalized_state
+
+    # vndk etc finalization
+    source $top/build/make/finalize-aidl-vndk-sdk-resources.sh
+
+    # move all changes to fina-step1 branch and commit with a robot message
+    commit_changes
+}
+
+finalize_step_1_main
diff --git a/finalize_branch_for_release.sh b/finalize_branch_for_release.sh
new file mode 100755
index 0000000..b46390d
--- /dev/null
+++ b/finalize_branch_for_release.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_main() {
+    local top="$(dirname "$0")"/../..
+
+    # default target to modify tree and build SDK
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # Build finalization artifacts.
+    source $top/build/make/finalize-aidl-vndk-sdk-resources.sh
+
+    # This command tests:
+    #   The release state for AIDL.
+    #   ABI difference between user and userdebug builds.
+    #   Resource/SDK finalization.
+    # In the future, we would want to actually turn the branch into the REL
+    # state and test with that.
+    AIDL_FROZEN_REL=true $m droidcore
+
+    # Build SDK (TODO)
+    # lunch sdk...
+    # m ...
+}
+
+finalize_main
+
diff --git a/help.sh b/help.sh
index e51adc1..c405959 100755
--- a/help.sh
+++ b/help.sh
@@ -26,6 +26,8 @@
     clean                   (aka clobber) equivalent to rm -rf out/
     checkbuild              Build every module defined in the source tree
     droid                   Default target
+    sync                    Build everything in the default target except the images,
+                            for use with adb sync.
     nothing                 Do not build anything, just parse and validate the build structure
 
     java                    Build all the java code in the source tree
diff --git a/rbesetup.sh b/rbesetup.sh
index 3b0e7cf..8386628 100644
--- a/rbesetup.sh
+++ b/rbesetup.sh
@@ -33,20 +33,15 @@
 # This function prefixes the given command with appropriate variables needed
 # for the build to be executed with RBE.
 function use_rbe() {
-  local RBE_LOG_DIR="/tmp"
   local RBE_BINARIES_DIR="prebuilts/remoteexecution-client/latest"
   local DOCKER_IMAGE="gcr.io/androidbuild-re-dockerimage/android-build-remoteexec-image@sha256:582efb38f0c229ea39952fff9e132ccbe183e14869b39888010dacf56b360d62"
 
   # Do not set an invocation-ID and let reproxy auto-generate one.
   USE_RBE="true" \
-  FLAG_server_address="unix:///tmp/reproxy_$RANDOM.sock" \
   FLAG_exec_root="$(gettop)" \
   FLAG_platform="container-image=docker://${DOCKER_IMAGE}" \
   RBE_use_application_default_credentials="true" \
-  RBE_log_dir="${RBE_LOG_DIR}" \
   RBE_reproxy_wait_seconds="20" \
-  RBE_output_dir="${RBE_LOG_DIR}" \
-  RBE_log_path="text://${RBE_LOG_DIR}/reproxy_log.txt" \
   RBE_CXX_EXEC_STRATEGY="remote_local_fallback" \
   RBE_cpp_dependency_scanner_plugin="${RBE_BINARIES_DIR}/dependency_scanner_go_plugin.so" \
   RBE_DIR=${RBE_BINARIES_DIR} \
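For context, `use_rbe` is meant to wrap a build command so that it runs with the RBE variables set; a usage sketch (the wrapped target is arbitrary):

```bash
source build/envsetup.sh
source build/make/rbesetup.sh   # defines use_rbe()
lunch aosp_arm64-userdebug
use_rbe m droid                 # run the build with the RBE variables prefixed
```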
diff --git a/tapasHelp.sh b/tapasHelp.sh
index 0f46130..7cb5f2c 100755
--- a/tapasHelp.sh
+++ b/tapasHelp.sh
@@ -6,7 +6,7 @@
 cd ../..
 TOP="${PWD}"
 
-message='usage: tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user]
+message='usage: tapas [<App1> <App2> ...] [arm|x86|arm64|x86_64] [eng|userdebug|user] [devkeys]
 
 tapas selects individual apps to be built by the Android build system. Unlike
 "lunch", "tapas" does not request the building of images for a device.
diff --git a/target/board/Android.mk b/target/board/Android.mk
index 142270e..21c0c10 100644
--- a/target/board/Android.mk
+++ b/target/board/Android.mk
@@ -19,8 +19,11 @@
 ifndef board_info_txt
 board_info_txt := $(wildcard $(TARGET_DEVICE_DIR)/board-info.txt)
 endif
-$(INSTALLED_ANDROID_INFO_TXT_TARGET): $(board_info_txt) build/make/tools/check_radio_versions.py
-	$(hide) build/make/tools/check_radio_versions.py $< $(BOARD_INFO_CHECK)
+CHECK_RADIO_VERSIONS := $(HOST_OUT_EXECUTABLES)/check_radio_versions$(HOST_EXECUTABLE_SUFFIX)
+$(INSTALLED_ANDROID_INFO_TXT_TARGET): $(board_info_txt) $(CHECK_RADIO_VERSIONS)
+	$(hide) $(CHECK_RADIO_VERSIONS) \
+		--board_info_txt $(board_info_txt) \
+		--board_info_check $(BOARD_INFO_CHECK)
 	$(call pretty,"Generated: ($@)")
 ifdef board_info_txt
 	$(hide) grep -v '#' $< > $@
@@ -30,6 +33,8 @@
 	$(hide) echo "" > $@
 endif
 
+$(call declare-0p-target,$(INSTALLED_ANDROID_INFO_TXT_TARGET))
+
 # Copy compatibility metadata to the device.
 
 # Device Manifest
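For illustration, the rewritten rule now runs the built host tool with named flags rather than positional arguments; roughly (the board path and check value are placeholders, not values from this change):

```bash
# Rough shape of the new invocation, after `m check_radio_versions`:
out/host/linux-x86/bin/check_radio_versions \
    --board_info_txt device/<vendor>/<board>/board-info.txt \
    --board_info_check "<value of BOARD_INFO_CHECK>"
```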
diff --git a/target/board/BoardConfigEmuCommon.mk b/target/board/BoardConfigEmuCommon.mk
index 845225d..f6e64a1 100644
--- a/target/board/BoardConfigEmuCommon.mk
+++ b/target/board/BoardConfigEmuCommon.mk
@@ -87,6 +87,5 @@
 
 BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
 BOARD_FLASH_BLOCK_SIZE := 512
-DEVICE_MATRIX_FILE   := device/generic/goldfish/compatibility_matrix.xml
 
 BOARD_SEPOLICY_DIRS += device/generic/goldfish/sepolicy/common
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 8e062ba..8c634f6 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -11,8 +11,11 @@
 # This flag is set by mainline but isn't desired for GSI.
 BOARD_USES_SYSTEM_OTHER_ODEX :=
 
-# system.img is always ext4 and non-sparsed.
+# system.img is ext4/erofs and non-sparse.
+GSI_FILE_SYSTEM_TYPE ?= ext4
+BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE := $(GSI_FILE_SYSTEM_TYPE)
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
+TARGET_USERIMAGES_SPARSE_EROFS_DISABLED := true
 
 # GSI also includes make_f2fs to support a userdata partition in f2fs
 # for some devices
@@ -35,6 +38,12 @@
 #   updating the last seen rollback index in the tamper-evident storage.
 BOARD_AVB_ROLLBACK_INDEX := 0
 
+# The chained vbmeta settings for boot images.
+BOARD_AVB_BOOT_KEY_PATH := external/avb/test/data/testkey_rsa4096.pem
+BOARD_AVB_BOOT_ALGORITHM := SHA256_RSA4096
+BOARD_AVB_BOOT_ROLLBACK_INDEX := $(PLATFORM_SECURITY_PATCH_TIMESTAMP)
+BOARD_AVB_BOOT_ROLLBACK_INDEX_LOCATION := 2
+
 # Enable AVB chained partition for system.
 # https://android.googlesource.com/platform/external/avb/+/master/README.md
 BOARD_AVB_SYSTEM_KEY_PATH := external/avb/test/data/testkey_rsa2048.pem
@@ -71,6 +80,3 @@
 
 # Set up a vendor image so that PRODUCT_VENDOR_PROPERTIES does not affect GSI
 BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE := ext4
-
-# Disable 64 bit mediadrmserver
-TARGET_ENABLE_MEDIADRM_64 :=
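Because `GSI_FILE_SYSTEM_TYPE` is only a `?=` default, a board config can choose erofs before the common include runs; a sketch (the board's BoardConfig.mk path is hypothetical):

```make
# In a board's BoardConfig.mk: pick EROFS for the GSI system image.
GSI_FILE_SYSTEM_TYPE := erofs
include build/make/target/board/BoardConfigGsiCommon.mk
# BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE now resolves to erofs.
```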
diff --git a/target/board/BoardConfigPixelCommon.mk b/target/board/BoardConfigPixelCommon.mk
index a970fec..22521b5 100644
--- a/target/board/BoardConfigPixelCommon.mk
+++ b/target/board/BoardConfigPixelCommon.mk
@@ -5,6 +5,7 @@
 # Using sha256 for dm-verity partitions. b/156162446
 # system, system_other, system_ext and product.
 BOARD_AVB_SYSTEM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
+BOARD_AVB_SYSTEM_DLKM_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
 BOARD_AVB_SYSTEM_OTHER_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
 BOARD_AVB_SYSTEM_EXT_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
 BOARD_AVB_PRODUCT_ADD_HASHTREE_FOOTER_ARGS += --hash_algorithm sha256
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 45ed3da..40be80e 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -52,6 +52,9 @@
 TARGET_2ND_CPU_VARIANT := generic
 endif
 
+# Include 64-bit mediaserver to support 64-bit only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
 include build/make/target/board/BoardConfigGsiCommon.mk
 
 # Some vendors still haven't cleaned up all device specific directories under
diff --git a/target/board/generic_riscv64/BoardConfig.mk b/target/board/generic_riscv64/BoardConfig.mk
new file mode 100644
index 0000000..906f7f0
--- /dev/null
+++ b/target/board/generic_riscv64/BoardConfig.mk
@@ -0,0 +1,28 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# riscv64 emulator specific definitions
+TARGET_ARCH := riscv64
+TARGET_ARCH_VARIANT :=
+TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := riscv64
+
+# Include 64-bit mediaserver to support 64-bit only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
+include build/make/target/board/BoardConfigGsiCommon.mk
+
+# Temporary hack while prebuilt modules are missing riscv64.
+ALLOW_MISSING_DEPENDENCIES := true
diff --git a/target/board/generic_riscv64/README.txt b/target/board/generic_riscv64/README.txt
new file mode 100644
index 0000000..9811982
--- /dev/null
+++ b/target/board/generic_riscv64/README.txt
@@ -0,0 +1,7 @@
+The "generic_riscv64" product defines a non-hardware-specific riscv64 target
+without a bootloader.
+
+It is also the target to build the generic kernel image (GKI).
+
+It is not a product "base class"; no other products inherit
+from it or use it in any way.
diff --git a/target/product/iorap_large_memory_config.mk b/target/board/generic_riscv64/device.mk
similarity index 90%
rename from target/product/iorap_large_memory_config.mk
rename to target/board/generic_riscv64/device.mk
index 0c6c89a..27a4175 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/target/board/generic_riscv64/device.mk
@@ -1,4 +1,5 @@
-# Copyright (C) 2020 The Android Open Source Project
+#
+# Copyright (C) 2022 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
diff --git a/target/board/generic_riscv64/system_ext.prop b/target/board/generic_riscv64/system_ext.prop
new file mode 100644
index 0000000..42c4ef5
--- /dev/null
+++ b/target/board/generic_riscv64/system_ext.prop
@@ -0,0 +1,5 @@
+#
+# system.prop for generic riscv64 sdk
+#
+
+rild.libpath=/vendor/lib64/libreference-ril.so
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index 93694f2..e7f2ae0 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -22,6 +22,9 @@
 TARGET_2ND_ARCH := x86
 TARGET_2ND_ARCH_VARIANT := x86_64
 
+# Include 64-bit mediaserver to support 64-bit only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
 include build/make/target/board/BoardConfigGsiCommon.mk
 
 ifndef BUILDING_GSI
diff --git a/target/board/gsi_arm64/BoardConfig.mk b/target/board/gsi_arm64/BoardConfig.mk
index db6f3f0..db95082 100644
--- a/target/board/gsi_arm64/BoardConfig.mk
+++ b/target/board/gsi_arm64/BoardConfig.mk
@@ -27,6 +27,9 @@
 TARGET_2ND_CPU_ABI2 := armeabi
 TARGET_2ND_CPU_VARIANT := generic
 
+# Include 64-bit mediaserver to support 64-bit only devices
+TARGET_DYNAMIC_64_32_MEDIASERVER := true
+
 # TODO(b/111434759, b/111287060) SoC specific hacks
 BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
 BOARD_ROOT_EXTRA_SYMLINKS += /mnt/vendor/persist:/persist
diff --git a/target/product/iorap_large_memory_config.mk b/target/board/linux_bionic/BoardConfig.mk
similarity index 62%
copy from target/product/iorap_large_memory_config.mk
copy to target/board/linux_bionic/BoardConfig.mk
index 0c6c89a..7938bdb 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/target/board/linux_bionic/BoardConfig.mk
@@ -12,3 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+
+# This "device" is only intended to be used for host Bionic build targets, so
+# (device) target architectures are irrelevant. However, the build system isn't
+# prepared to handle no target architectures at all, so pick something
+# arbitrarily.
+TARGET_ARCH_SUITE := ndk
+
+HOST_CROSS_OS := linux_bionic
+HOST_CROSS_ARCH := x86_64
+HOST_CROSS_2ND_ARCH :=
diff --git a/target/board/linux_bionic/README.md b/target/board/linux_bionic/README.md
new file mode 100644
index 0000000..8db77f2
--- /dev/null
+++ b/target/board/linux_bionic/README.md
@@ -0,0 +1,6 @@
+This "device" is suitable for Soong-only builds to create Bionic binaries for
+Linux hosts:
+
+```
+build/soong/soong_ui.bash --make-mode --soong-only TARGET_PRODUCT=linux_bionic ...
+```
diff --git a/target/board/mainline_sdk/BoardConfig.mk b/target/board/mainline_sdk/BoardConfig.mk
index 84f8b2d..f5c2dc6 100644
--- a/target/board/mainline_sdk/BoardConfig.mk
+++ b/target/board/mainline_sdk/BoardConfig.mk
@@ -18,3 +18,6 @@
 HOST_CROSS_OS := linux_bionic
 HOST_CROSS_ARCH := x86_64
 HOST_CROSS_2ND_ARCH :=
+
+# Required flag for non-64-bit devices since Android P.
+TARGET_USES_64_BIT_BINDER := true
diff --git a/target/product/iorap_large_memory_config.mk b/target/board/module_arm64only/BoardConfig.mk
similarity index 70%
copy from target/product/iorap_large_memory_config.mk
copy to target/board/module_arm64only/BoardConfig.mk
index 0c6c89a..3cabf05 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/target/board/module_arm64only/BoardConfig.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright (C) 2022 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,3 +12,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+
+include build/make/target/board/BoardConfigModuleCommon.mk
+
+TARGET_ARCH := arm64
+TARGET_ARCH_VARIANT := armv8-a
+TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := arm64-v8a
diff --git a/target/board/module_arm64only/README.md b/target/board/module_arm64only/README.md
new file mode 100644
index 0000000..0dd1699
--- /dev/null
+++ b/target/board/module_arm64only/README.md
@@ -0,0 +1,2 @@
+This device is suitable for an unbundled module targeted specifically to an
+arm64 device. 32-bit binaries will not be built.
diff --git a/target/product/iorap_large_memory_config.mk b/target/board/module_x86_64only/BoardConfig.mk
similarity index 80%
copy from target/product/iorap_large_memory_config.mk
copy to target/board/module_x86_64only/BoardConfig.mk
index 0c6c89a..b0676cb 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/target/board/module_x86_64only/BoardConfig.mk
@@ -12,3 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+
+include build/make/target/board/BoardConfigModuleCommon.mk
+
+TARGET_CPU_ABI := x86_64
+TARGET_ARCH := x86_64
+TARGET_ARCH_VARIANT := x86_64
diff --git a/target/board/module_x86_64only/README.md b/target/board/module_x86_64only/README.md
new file mode 100644
index 0000000..8fd7dc4
--- /dev/null
+++ b/target/board/module_x86_64only/README.md
@@ -0,0 +1,2 @@
+This device is suitable for an unbundled module targeted specifically to an
+x86_64 device. 32-bit binaries will not be built.
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index ee702e5..585630b 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -46,6 +46,7 @@
     $(LOCAL_DIR)/aosp_64bitonly_x86_64.mk \
     $(LOCAL_DIR)/aosp_arm64.mk \
     $(LOCAL_DIR)/aosp_arm.mk \
+    $(LOCAL_DIR)/aosp_riscv64.mk \
     $(LOCAL_DIR)/aosp_x86_64.mk \
     $(LOCAL_DIR)/aosp_x86_arm.mk \
     $(LOCAL_DIR)/aosp_x86.mk \
@@ -74,11 +75,14 @@
 endif
 
 PRODUCT_MAKEFILES += \
+    $(LOCAL_DIR)/linux_bionic.mk \
     $(LOCAL_DIR)/mainline_sdk.mk \
     $(LOCAL_DIR)/module_arm.mk \
     $(LOCAL_DIR)/module_arm64.mk \
+    $(LOCAL_DIR)/module_arm64only.mk \
     $(LOCAL_DIR)/module_x86.mk \
     $(LOCAL_DIR)/module_x86_64.mk \
+    $(LOCAL_DIR)/module_x86_64only.mk \
 
 COMMON_LUNCH_CHOICES := \
     aosp_arm64-eng \
diff --git a/target/product/OWNERS b/target/product/OWNERS
index b3d8998..61f7d45 100644
--- a/target/product/OWNERS
+++ b/target/product/OWNERS
@@ -3,3 +3,8 @@
 # GSI
 per-file gsi_release.mk = file:/target/product/gsi/OWNERS
 per-file developer_gsi_keys.mk = file:/target/product/gsi/OWNERS
+
+# Android Go
+per-file go_defaults.mk = gkaiser@google.com, rajekumar@google.com
+per-file go_defaults_512.mk = gkaiser@google.com, rajekumar@google.com
+per-file go_defaults_common.mk = gkaiser@google.com, rajekumar@google.com
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index 01897b7..ffc37a9 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -43,6 +43,9 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
 
+# pKVM
+$(call inherit-product, packages/modules/Virtualization/apex/product_packages.mk)
+
 #
 # All components inherited here go to product image
 #
diff --git a/target/product/aosp_riscv64.mk b/target/product/aosp_riscv64.mk
new file mode 100644
index 0000000..023317b
--- /dev/null
+++ b/target/product/aosp_riscv64.mk
@@ -0,0 +1,79 @@
+#
+# Copyright 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PRODUCT_USE_DYNAMIC_PARTITIONS := true
+
+# The system image of aosp_riscv64-userdebug is a GSI for devices with:
+# - riscv64 user space
+# - a 64-bit binder interface
+# - system-as-root
+# - VNDK enforcement
+# - compatible property override enabled
+
+# This is a build configuration for a full-featured build of the
+# Open-Source part of the tree. It's geared toward a US-centric
+# build quite specifically for the emulator, and might not be
+# entirely appropriate to inherit from for on-device configurations.
+
+# GSI for system/product & support 64-bit apps only
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+#$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
+TARGET_FLATTEN_APEX := false
+
+#
+# All components inherited here go to system_ext image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
+
+#
+# All components inherited here go to product image
+#
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_product.mk)
+
+#
+# All components inherited here go to vendor image
+#
+$(call inherit-product-if-exists, device/generic/goldfish/riscv64-vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_riscv64/device.mk)
+
+#
+# Special settings for GSI releasing
+#
+ifeq (aosp_riscv64,$(TARGET_PRODUCT))
+$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
+endif
+
+# TODO: this list should come via mainline_system.mk, but for now list
+# just the modules that work for riscv64.
+PRODUCT_PACKAGES := \
+  init.environ.rc \
+  init_system \
+  linker \
+  shell_and_utilities \
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/default_art_config.mk)
+PRODUCT_USES_DEFAULT_ART_CONFIG := false
+
+PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
+    root/init.zygote64.rc
+
+# This build configuration supports 64-bit apps only
+PRODUCT_NAME := aosp_riscv64
+PRODUCT_DEVICE := generic_riscv64
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on Riscv64
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index b3cfae4..d55866f 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -45,6 +45,9 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/handheld_system_ext.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/telephony_system_ext.mk)
 
+# pKVM
+$(call inherit-product, packages/modules/Virtualization/apex/product_packages.mk)
+
 #
 # All components inherited here go to product image
 #
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 05ddfe5..96d7b2f 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -24,7 +24,7 @@
     android.hidl.manager-V1.0-java \
     android.hidl.memory@1.0-impl \
     android.hidl.memory@1.0-impl.vendor \
-    android.system.suspend@1.0-service \
+    android.system.suspend-service \
     android.test.base \
     android.test.mock \
     android.test.runner \
@@ -50,7 +50,9 @@
     charger \
     cmd \
     com.android.adbd \
+    com.android.adservices \
     com.android.appsearch \
+    com.android.btservices \
     com.android.conscrypt \
     com.android.cronet \
     com.android.extservices \
@@ -60,15 +62,16 @@
     com.android.media \
     com.android.media.swcodec \
     com.android.mediaprovider \
+    com.android.ondevicepersonalization \
     com.android.os.statsd \
     com.android.permission \
     com.android.resolv \
     com.android.neuralnetworks \
     com.android.scheduling \
     com.android.sdkext \
-    com.android.sepolicy \
     com.android.tethering \
     com.android.tzdata \
+    com.android.uwb \
     com.android.wifi \
     ContactsProvider \
     content \
@@ -218,6 +221,7 @@
     mke2fs \
     mkfs.erofs \
     monkey \
+    mtectrl \
     mtpd \
     ndc \
     netd \
@@ -235,6 +239,7 @@
     pppd \
     preinstalled-packages-platform.xml \
     privapp-permissions-platform.xml \
+    prng_seeder \
     racoon \
     recovery-persist \
     resize2fs \
@@ -271,7 +276,6 @@
     traced \
     traced_probes \
     tune2fs \
-    tzdatacheck \
     uiautomator \
     uinput \
     uncrypt \
@@ -314,6 +318,11 @@
   endif # EMMA_INSTRUMENT_STATIC
 endif # EMMA_INSTRUMENT
 
+# For testing purposes
+ifeq ($(FORCE_AUDIO_SILENT), true)
+    PRODUCT_SYSTEM_PROPERTIES += ro.audio.silent=1
+endif
+
 # Host tools to install
 PRODUCT_HOST_PACKAGES += \
     BugReport \
@@ -341,7 +350,6 @@
     sqlite3 \
     tinyplay \
     tune2fs \
-    tzdatacheck \
     unwind_info \
     unwind_reg_info \
     unwind_symbols \
@@ -368,7 +376,6 @@
 PRODUCT_PACKAGES_DEBUG := \
     adb_keys \
     arping \
-    com.android.sepolicy.cert-debug.der \
     dmuserd \
     idlcli \
     init-debug.rc \
@@ -380,7 +387,6 @@
     procrank \
     profcollectd \
     profcollectctl \
-    remount \
     servicedispatcher \
     showmap \
     sqlite3 \
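The `FORCE_AUDIO_SILENT` hook above is an ordinary make conditional evaluated at product-config time, so it can be flipped from the build invocation; a sketch, assuming the conditional added in this change:

```bash
# Build a test image with audio silenced; the hook adds ro.audio.silent=1.
FORCE_AUDIO_SILENT=true m droid
```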
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 5004b85..7fb785c 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -29,6 +29,11 @@
     shell_and_utilities_recovery \
     watchdogd.recovery \
 
+PRODUCT_VENDOR_PROPERTIES += \
+    ro.recovery.usb.vid?=18D1 \
+    ro.recovery.usb.adb.pid?=D001 \
+    ro.recovery.usb.fastboot.pid?=4EE0 \
+
 # These had been pulled in via init_second_stage.recovery, but may not be needed.
 PRODUCT_HOST_PACKAGES += \
     e2fsdroid \
@@ -42,7 +47,6 @@
 # Base modules and settings for the vendor partition.
 PRODUCT_PACKAGES += \
     android.hardware.cas@1.2-service \
-    android.hardware.media.omx@1.0-service \
     boringssl_self_test_vendor \
     dumpsys_vendor \
     fs_config_files_nonsystem \
@@ -69,6 +73,14 @@
     selinux_policy_nonsystem \
     shell_and_utilities_vendor \
 
+# OMX is not supported on 64-bit-only builds.
+# It is only supported when SHIPPING_API_LEVEL is less than or equal to 33.
+ifneq ($(TARGET_SUPPORTS_OMX_SERVICE),false)
+    PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33 += \
+        android.hardware.media.omx@1.0-service \
+
+endif
+
 # Base module when shipping api level is less than or equal to 29
 PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29 += \
     android.hardware.configstore@1.1-service \
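The `?=` suffix on the recovery USB properties above marks them as optional assignments, so per the build's property-merge rules a device's own plain assignment should take precedence; a sketch (the ID values are placeholders):

```make
# In a device makefile: override the default recovery USB IDs.
PRODUCT_VENDOR_PROPERTIES += \
    ro.recovery.usb.vid=ABCD \
    ro.recovery.usb.adb.pid=0001 \
    ro.recovery.usb.fastboot.pid=0002
```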
diff --git a/target/product/core_64_bit.mk b/target/product/core_64_bit.mk
index 322fa80..e0c4d53 100644
--- a/target/product/core_64_bit.mk
+++ b/target/product/core_64_bit.mk
@@ -23,11 +23,17 @@
 # for 32-bit only.
 
 # Copy the 64-bit primary, 32-bit secondary zygote startup script
-PRODUCT_COPY_FILES += system/core/rootdir/init.zygote64_32.rc:system/etc/init/hw/init.zygote64_32.rc
+PRODUCT_COPY_FILES += \
+    system/core/rootdir/init.zygote64.rc:system/etc/init/hw/init.zygote64.rc \
+    system/core/rootdir/init.zygote64_32.rc:system/etc/init/hw/init.zygote64_32.rc \
 
 # Set the zygote property to select the 64-bit primary, 32-bit secondary script
 # This line must be parsed before the one in core_minimal.mk
+ifeq ($(ZYGOTE_FORCE_64),true)
+PRODUCT_VENDOR_PROPERTIES += ro.zygote=zygote64
+else
 PRODUCT_VENDOR_PROPERTIES += ro.zygote=zygote64_32
+endif
 
 TARGET_SUPPORTS_32_BIT_APPS := true
 TARGET_SUPPORTS_64_BIT_APPS := true
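A product that wants the 64-bit-only zygote can set the new switch before the inherit, since the conditional above is evaluated when core_64_bit.mk is parsed; a sketch (the enclosing product makefile is hypothetical):

```make
# In a product makefile, before the core_64_bit.mk inherit:
ZYGOTE_FORCE_64 := true
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
# ro.zygote is now zygote64 rather than zygote64_32.
```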
diff --git a/target/product/core_64_bit_only.mk b/target/product/core_64_bit_only.mk
index 061728f..fc2b8e5 100644
--- a/target/product/core_64_bit_only.mk
+++ b/target/product/core_64_bit_only.mk
@@ -31,3 +31,4 @@
 
 TARGET_SUPPORTS_32_BIT_APPS := false
 TARGET_SUPPORTS_64_BIT_APPS := true
+TARGET_SUPPORTS_OMX_SERVICE := false
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 851a2cb..901302e 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -51,12 +51,16 @@
 # Note: core-icu4j is moved back to PRODUCT_BOOT_JARS in product_config.mk at a later stage.
 # Note: For modules available in Q, DO NOT add new entries here.
 PRODUCT_APEX_BOOT_JARS := \
+    com.android.adservices:framework-adservices \
+    com.android.adservices:framework-sdksandbox \
     com.android.appsearch:framework-appsearch \
+    com.android.btservices:framework-bluetooth \
     com.android.conscrypt:conscrypt \
     com.android.i18n:core-icu4j \
     com.android.ipsec:android.net.ipsec.ike \
     com.android.media:updatable-media \
     com.android.mediaprovider:framework-mediaprovider \
+    com.android.ondevicepersonalization:framework-ondevicepersonalization \
     com.android.os.statsd:framework-statsd \
     com.android.permission:framework-permission \
     com.android.permission:framework-permission-s \
@@ -65,18 +69,25 @@
     com.android.tethering:framework-connectivity \
     com.android.tethering:framework-connectivity-t \
     com.android.tethering:framework-tethering \
-    com.android.wifi:framework-wifi
+    com.android.uwb:framework-uwb \
+    com.android.wifi:framework-wifi \
 
 # List of system_server classpath jars delivered via apex.
 # Keep the list sorted by module names and then library names.
 # Note: For modules available in Q, DO NOT add new entries here.
 PRODUCT_APEX_SYSTEM_SERVER_JARS := \
+    com.android.adservices:service-adservices \
+    com.android.adservices:service-sdksandbox \
     com.android.appsearch:service-appsearch \
     com.android.art:service-art \
     com.android.media:service-media-s \
     com.android.permission:service-permission \
 
-PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION += art/build/boot/boot-image-profile.txt
+# Use $(wildcard) to avoid referencing the profile in thin manifests that don't have the
+# art project.
+ifneq (,$(wildcard art))
+  PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION += art/build/boot/boot-image-profile.txt
+endif
 
 # List of jars on the platform that system_server loads dynamically using separate classloaders.
 # Keep the list sorted library names.
@@ -86,9 +97,11 @@
 # Keep the list sorted by module names and then library names.
 # Note: For modules available in Q, DO NOT add new entries here.
 PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS := \
+    com.android.btservices:service-bluetooth \
     com.android.os.statsd:service-statsd \
     com.android.scheduling:service-scheduling \
     com.android.tethering:service-connectivity \
+    com.android.uwb:service-uwb \
     com.android.wifi:service-wifi \
 
 # Minimal configuration for running dex2oat (default argument values).
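The `$(wildcard)` guard is plain make: it expands to the empty string when the path is absent, so thin manifests that omit the art project simply skip the profile. The same pattern in general form (paths hypothetical):

```make
# Reference a file only when its project is actually in the checkout.
ifneq (,$(wildcard vendor/example))
  PRODUCT_COPY_FILES += vendor/example/profile.txt:system/etc/example.txt
endif
```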
diff --git a/target/product/emulator.mk b/target/product/emulator.mk
deleted file mode 100644
index 36da1f7..0000000
--- a/target/product/emulator.mk
+++ /dev/null
@@ -1,60 +0,0 @@
-#
-# Copyright (C) 2012 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-#
-# This file is included by other product makefiles to add all the
-# emulator-related modules to PRODUCT_PACKAGES.
-#
-
-# Device modules
-PRODUCT_PACKAGES += \
-    CarrierConfig \
-
-# need this for gles libraries to load properly
-# after moving to /vendor/lib/
-PRODUCT_PACKAGES += \
-    vndk-sp
-
-# WiFi: system side
-PRODUCT_PACKAGES += \
-	ip \
-	iw \
-	wificond \
-
-
-PRODUCT_PACKAGE_OVERLAYS := device/generic/goldfish/overlay
-
-PRODUCT_CHARACTERISTICS := emulator
-
-PRODUCT_FULL_TREBLE_OVERRIDE := true
-
-# goldfish vendor partition configurations
-$(call inherit-product-if-exists, device/generic/goldfish/vendor.mk)
-
-#watchdog triggers reboot because location service is not
-#responding, disable it for now.
-#still keep it on internal master as it is still working
-#once it is fixed in aosp, remove this comment block.
-#PRODUCT_VENDOR_PROPERTIES += \
-#config.disable_location=true
-
-# enable Google-specific location features,
-# like NetworkLocationProvider and LocationCollector
-PRODUCT_SYSTEM_EXT_PROPERTIES += \
-    ro.com.google.locationfeatures=1
-
-# disable setupwizard
-PRODUCT_SYSTEM_EXT_PROPERTIES += \
-    ro.setupwizard.mode=DISABLED
diff --git a/target/product/full.mk b/target/product/full.mk
index adb54ab..782280d 100644
--- a/target/product/full.mk
+++ b/target/product/full.mk
@@ -20,7 +20,7 @@
 # entirely appropriate to inherit from for on-device configurations.
 
 $(call inherit-product-if-exists, device/generic/goldfish/arm32-vendor.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic/device.mk)
 
diff --git a/target/product/full_base.mk b/target/product/full_base.mk
index a8e1e91..39c66da3 100644
--- a/target/product/full_base.mk
+++ b/target/product/full_base.mk
@@ -28,12 +28,6 @@
     PhotoTable \
     preinstalled-packages-platform-full-base.xml
 
-# Bluetooth:
-#   audio.a2dp.default is a system module. Generic system image includes
-#   audio.a2dp.default to support A2DP if board has the capability.
-PRODUCT_PACKAGES += \
-    audio.a2dp.default
-
 # Net:
 #   Vendors can use the platform-provided network configuration utilities (ip,
 #   iptable, etc.) to configure the Linux networking stack, but these utilities
diff --git a/target/product/full_x86.mk b/target/product/full_x86.mk
index 2f40c03..0f3be91 100644
--- a/target/product/full_x86.mk
+++ b/target/product/full_x86.mk
@@ -23,7 +23,7 @@
 # that isn't a wifi connection. This will instruct init.rc to enable the
 # network connection so that you can use it with ADB
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
 
diff --git a/target/product/generic_ramdisk.mk b/target/product/generic_ramdisk.mk
index fb0370e..c7dcd60 100644
--- a/target/product/generic_ramdisk.mk
+++ b/target/product/generic_ramdisk.mk
@@ -22,10 +22,7 @@
 # Ramdisk
 PRODUCT_PACKAGES += \
     init_first_stage \
-    e2fsck.ramdisk \
-    fsck.f2fs.ramdisk \
-    tune2fs.ramdisk \
-    snapuserd.ramdisk \
+    snapuserd_ramdisk \
 
 # Debug ramdisk
 PRODUCT_PACKAGES += \
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index f13c9db..1a639ef 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -52,11 +52,6 @@
     cppreopts.sh \
     otapreopt_script \
 
-# Bluetooth libraries
-PRODUCT_PACKAGES += \
-    audio.a2dp.default \
-    audio.hearing_aid.default \
-
 # For ringtones that rely on forward lock encryption
 PRODUCT_PACKAGES += libfwdlockengine
 
diff --git a/target/product/gsi/33.txt b/target/product/gsi/33.txt
new file mode 100644
index 0000000..03a143d
--- /dev/null
+++ b/target/product/gsi/33.txt
@@ -0,0 +1,254 @@
+LLNDK: libEGL.so
+LLNDK: libGLESv1_CM.so
+LLNDK: libGLESv2.so
+LLNDK: libGLESv3.so
+LLNDK: libRS.so
+LLNDK: libandroid_net.so
+LLNDK: libbinder_ndk.so
+LLNDK: libc.so
+LLNDK: libcgrouprc.so
+LLNDK: libdl.so
+LLNDK: libft2.so
+LLNDK: liblog.so
+LLNDK: libm.so
+LLNDK: libmediandk.so
+LLNDK: libnativewindow.so
+LLNDK: libneuralnetworks.so
+LLNDK: libselinux.so
+LLNDK: libsync.so
+LLNDK: libvndksupport.so
+LLNDK: libvulkan.so
+VNDK-SP: android.hardware.common-V2-ndk.so
+VNDK-SP: android.hardware.common.fmq-V1-ndk.so
+VNDK-SP: android.hardware.graphics.allocator-V1-ndk.so
+VNDK-SP: android.hardware.graphics.common-V3-ndk.so
+VNDK-SP: android.hardware.graphics.common@1.0.so
+VNDK-SP: android.hardware.graphics.common@1.1.so
+VNDK-SP: android.hardware.graphics.common@1.2.so
+VNDK-SP: android.hardware.graphics.composer3-V1-ndk.so
+VNDK-SP: android.hardware.graphics.mapper@2.0.so
+VNDK-SP: android.hardware.graphics.mapper@2.1.so
+VNDK-SP: android.hardware.graphics.mapper@3.0.so
+VNDK-SP: android.hardware.graphics.mapper@4.0.so
+VNDK-SP: android.hardware.renderscript@1.0.so
+VNDK-SP: android.hidl.memory.token@1.0.so
+VNDK-SP: android.hidl.memory@1.0-impl.so
+VNDK-SP: android.hidl.memory@1.0.so
+VNDK-SP: android.hidl.safe_union@1.0.so
+VNDK-SP: libRSCpuRef.so
+VNDK-SP: libRSDriver.so
+VNDK-SP: libRS_internal.so
+VNDK-SP: libbacktrace.so
+VNDK-SP: libbase.so
+VNDK-SP: libbcinfo.so
+VNDK-SP: libblas.so
+VNDK-SP: libc++.so
+VNDK-SP: libcompiler_rt.so
+VNDK-SP: libcutils.so
+VNDK-SP: libdmabufheap.so
+VNDK-SP: libgralloctypes.so
+VNDK-SP: libhardware.so
+VNDK-SP: libhidlbase.so
+VNDK-SP: libhidlmemory.so
+VNDK-SP: libion.so
+VNDK-SP: libjsoncpp.so
+VNDK-SP: liblzma.so
+VNDK-SP: libprocessgroup.so
+VNDK-SP: libunwindstack.so
+VNDK-SP: libutils.so
+VNDK-SP: libutilscallstack.so
+VNDK-SP: libz.so
+VNDK-core: android.hardware.audio.common-V1-ndk.so
+VNDK-core: android.hardware.audio.common@2.0.so
+VNDK-core: android.hardware.authsecret-V1-ndk.so
+VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk.so
+VNDK-core: android.hardware.bluetooth.audio-V2-ndk.so
+VNDK-core: android.hardware.camera.common-V1-ndk.so
+VNDK-core: android.hardware.camera.device-V1-ndk.so
+VNDK-core: android.hardware.camera.metadata-V1-ndk.so
+VNDK-core: android.hardware.camera.provider-V1-ndk.so
+VNDK-core: android.hardware.configstore-utils.so
+VNDK-core: android.hardware.configstore@1.0.so
+VNDK-core: android.hardware.configstore@1.1.so
+VNDK-core: android.hardware.confirmationui-support-lib.so
+VNDK-core: android.hardware.drm-V1-ndk.so
+VNDK-core: android.hardware.dumpstate-V1-ndk.so
+VNDK-core: android.hardware.gnss-V2-ndk.so
+VNDK-core: android.hardware.graphics.allocator@2.0.so
+VNDK-core: android.hardware.graphics.allocator@3.0.so
+VNDK-core: android.hardware.graphics.allocator@4.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-core: android.hardware.health-V1-ndk.so
+VNDK-core: android.hardware.health.storage-V1-ndk.so
+VNDK-core: android.hardware.identity-V4-ndk.so
+VNDK-core: android.hardware.ir-V1-ndk.so
+VNDK-core: android.hardware.keymaster-V3-ndk.so
+VNDK-core: android.hardware.light-V2-ndk.so
+VNDK-core: android.hardware.media.bufferpool@2.0.so
+VNDK-core: android.hardware.media.omx@1.0.so
+VNDK-core: android.hardware.media@1.0.so
+VNDK-core: android.hardware.memtrack-V1-ndk.so
+VNDK-core: android.hardware.memtrack@1.0.so
+VNDK-core: android.hardware.nfc-V1-ndk.so
+VNDK-core: android.hardware.oemlock-V1-ndk.so
+VNDK-core: android.hardware.power-V3-ndk.so
+VNDK-core: android.hardware.power.stats-V1-ndk.so
+VNDK-core: android.hardware.radio-V1-ndk.so
+VNDK-core: android.hardware.radio.config-V1-ndk.so
+VNDK-core: android.hardware.radio.data-V1-ndk.so
+VNDK-core: android.hardware.radio.messaging-V1-ndk.so
+VNDK-core: android.hardware.radio.modem-V1-ndk.so
+VNDK-core: android.hardware.radio.network-V1-ndk.so
+VNDK-core: android.hardware.radio.sim-V1-ndk.so
+VNDK-core: android.hardware.radio.voice-V1-ndk.so
+VNDK-core: android.hardware.rebootescrow-V1-ndk.so
+VNDK-core: android.hardware.security.dice-V1-ndk.so
+VNDK-core: android.hardware.security.keymint-V2-ndk.so
+VNDK-core: android.hardware.security.secureclock-V1-ndk.so
+VNDK-core: android.hardware.security.sharedsecret-V1-ndk.so
+VNDK-core: android.hardware.sensors-V1-ndk.so
+VNDK-core: android.hardware.soundtrigger3-V1-ndk.so
+VNDK-core: android.hardware.soundtrigger@2.0-core.so
+VNDK-core: android.hardware.soundtrigger@2.0.so
+VNDK-core: android.hardware.usb-V1-ndk.so
+VNDK-core: android.hardware.uwb-V1-ndk.so
+VNDK-core: android.hardware.vibrator-V2-ndk.so
+VNDK-core: android.hardware.weaver-V1-ndk.so
+VNDK-core: android.hardware.wifi.hostapd-V1-ndk.so
+VNDK-core: android.hardware.wifi.supplicant-V1-ndk.so
+VNDK-core: android.hidl.token@1.0-utils.so
+VNDK-core: android.hidl.token@1.0.so
+VNDK-core: android.media.audio.common.types-V1-ndk.so
+VNDK-core: android.media.soundtrigger.types-V1-ndk.so
+VNDK-core: android.system.keystore2-V2-ndk.so
+VNDK-core: android.system.suspend-V1-ndk.so
+VNDK-core: android.system.suspend@1.0.so
+VNDK-core: libaudioroute.so
+VNDK-core: libaudioutils.so
+VNDK-core: libbinder.so
+VNDK-core: libbufferqueueconverter.so
+VNDK-core: libcamera_metadata.so
+VNDK-core: libcap.so
+VNDK-core: libcn-cbor.so
+VNDK-core: libcodec2.so
+VNDK-core: libcrypto.so
+VNDK-core: libcrypto_utils.so
+VNDK-core: libcurl.so
+VNDK-core: libdiskconfig.so
+VNDK-core: libdumpstateutil.so
+VNDK-core: libevent.so
+VNDK-core: libexif.so
+VNDK-core: libexpat.so
+VNDK-core: libfmq.so
+VNDK-core: libgatekeeper.so
+VNDK-core: libgui.so
+VNDK-core: libhardware_legacy.so
+VNDK-core: libhidlallocatorutils.so
+VNDK-core: libjpeg.so
+VNDK-core: libldacBT_abr.so
+VNDK-core: libldacBT_enc.so
+VNDK-core: liblz4.so
+VNDK-core: libmedia_helper.so
+VNDK-core: libmedia_omx.so
+VNDK-core: libmemtrack.so
+VNDK-core: libminijail.so
+VNDK-core: libmkbootimg_abi_check.so
+VNDK-core: libnetutils.so
+VNDK-core: libnl.so
+VNDK-core: libpcre2.so
+VNDK-core: libpiex.so
+VNDK-core: libpng.so
+VNDK-core: libpower.so
+VNDK-core: libprocinfo.so
+VNDK-core: libradio_metadata.so
+VNDK-core: libspeexresampler.so
+VNDK-core: libsqlite.so
+VNDK-core: libssl.so
+VNDK-core: libstagefright_bufferpool@2.0.so
+VNDK-core: libstagefright_bufferqueue_helper.so
+VNDK-core: libstagefright_foundation.so
+VNDK-core: libstagefright_omx.so
+VNDK-core: libstagefright_omx_utils.so
+VNDK-core: libstagefright_xmlparser.so
+VNDK-core: libsysutils.so
+VNDK-core: libtinyalsa.so
+VNDK-core: libtinyxml2.so
+VNDK-core: libui.so
+VNDK-core: libusbhost.so
+VNDK-core: libwifi-system-iface.so
+VNDK-core: libxml2.so
+VNDK-core: libyuv.so
+VNDK-core: libziparchive.so
+VNDK-private: libbacktrace.so
+VNDK-private: libblas.so
+VNDK-private: libcompiler_rt.so
+VNDK-private: libft2.so
+VNDK-private: libgui.so
+VNDK-product: android.hardware.audio.common@2.0.so
+VNDK-product: android.hardware.configstore@1.0.so
+VNDK-product: android.hardware.configstore@1.1.so
+VNDK-product: android.hardware.graphics.allocator@2.0.so
+VNDK-product: android.hardware.graphics.allocator@3.0.so
+VNDK-product: android.hardware.graphics.allocator@4.0.so
+VNDK-product: android.hardware.graphics.bufferqueue@1.0.so
+VNDK-product: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-product: android.hardware.graphics.common@1.0.so
+VNDK-product: android.hardware.graphics.common@1.1.so
+VNDK-product: android.hardware.graphics.common@1.2.so
+VNDK-product: android.hardware.graphics.mapper@2.0.so
+VNDK-product: android.hardware.graphics.mapper@2.1.so
+VNDK-product: android.hardware.graphics.mapper@3.0.so
+VNDK-product: android.hardware.graphics.mapper@4.0.so
+VNDK-product: android.hardware.media.bufferpool@2.0.so
+VNDK-product: android.hardware.media.omx@1.0.so
+VNDK-product: android.hardware.media@1.0.so
+VNDK-product: android.hardware.memtrack@1.0.so
+VNDK-product: android.hardware.renderscript@1.0.so
+VNDK-product: android.hardware.soundtrigger@2.0.so
+VNDK-product: android.hidl.memory.token@1.0.so
+VNDK-product: android.hidl.memory@1.0.so
+VNDK-product: android.hidl.safe_union@1.0.so
+VNDK-product: android.hidl.token@1.0.so
+VNDK-product: android.system.suspend@1.0.so
+VNDK-product: libaudioutils.so
+VNDK-product: libbacktrace.so
+VNDK-product: libbase.so
+VNDK-product: libc++.so
+VNDK-product: libcamera_metadata.so
+VNDK-product: libcap.so
+VNDK-product: libcompiler_rt.so
+VNDK-product: libcrypto.so
+VNDK-product: libcurl.so
+VNDK-product: libcutils.so
+VNDK-product: libevent.so
+VNDK-product: libexpat.so
+VNDK-product: libfmq.so
+VNDK-product: libhidlbase.so
+VNDK-product: libhidlmemory.so
+VNDK-product: libion.so
+VNDK-product: libjpeg.so
+VNDK-product: libjsoncpp.so
+VNDK-product: libldacBT_abr.so
+VNDK-product: libldacBT_enc.so
+VNDK-product: liblz4.so
+VNDK-product: liblzma.so
+VNDK-product: libminijail.so
+VNDK-product: libnl.so
+VNDK-product: libpcre2.so
+VNDK-product: libpiex.so
+VNDK-product: libpng.so
+VNDK-product: libprocessgroup.so
+VNDK-product: libprocinfo.so
+VNDK-product: libspeexresampler.so
+VNDK-product: libssl.so
+VNDK-product: libtinyalsa.so
+VNDK-product: libtinyxml2.so
+VNDK-product: libunwindstack.so
+VNDK-product: libutils.so
+VNDK-product: libutilscallstack.so
+VNDK-product: libwifi-system-iface.so
+VNDK-product: libxml2.so
+VNDK-product: libyuv.so
+VNDK-product: libz.so
+VNDK-product: libziparchive.so
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 85e551d..d02dc7a 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -185,6 +185,10 @@
     $(addsuffix .vendor,$(VNDK_SAMEPROCESS_LIBRARIES)) \
     $(VNDK_USING_CORE_VARIANT_LIBRARIES) \
     com.android.vndk.current
+
+LOCAL_ADDITIONAL_DEPENDENCIES += $(call module-built-files,\
+    $(addsuffix .vendor,$(VNDK_CORE_LIBRARIES) $(VNDK_SAMEPROCESS_LIBRARIES)))
+
 endif
 include $(BUILD_PHONY_PACKAGE)
 
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 1f27f52..56a9d8a 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -21,10 +21,12 @@
 LLNDK: libvulkan.so
 VNDK-SP: android.hardware.common-V2-ndk.so
 VNDK-SP: android.hardware.common.fmq-V1-ndk.so
-VNDK-SP: android.hardware.graphics.common-V2-ndk.so
+VNDK-SP: android.hardware.graphics.allocator-V1-ndk.so
+VNDK-SP: android.hardware.graphics.common-V3-ndk.so
 VNDK-SP: android.hardware.graphics.common@1.0.so
 VNDK-SP: android.hardware.graphics.common@1.1.so
 VNDK-SP: android.hardware.graphics.common@1.2.so
+VNDK-SP: android.hardware.graphics.composer3-V1-ndk.so
 VNDK-SP: android.hardware.graphics.mapper@2.0.so
 VNDK-SP: android.hardware.graphics.mapper@2.1.so
 VNDK-SP: android.hardware.graphics.mapper@3.0.so
@@ -37,7 +39,6 @@
 VNDK-SP: libRSCpuRef.so
 VNDK-SP: libRSDriver.so
 VNDK-SP: libRS_internal.so
-VNDK-SP: libbacktrace.so
 VNDK-SP: libbase.so
 VNDK-SP: libbcinfo.so
 VNDK-SP: libblas.so
@@ -57,59 +58,25 @@
 VNDK-SP: libutils.so
 VNDK-SP: libutilscallstack.so
 VNDK-SP: libz.so
-VNDK-core: android.hardware.audio.common-V1-ndk.so
 VNDK-core: android.hardware.audio.common@2.0.so
-VNDK-core: android.hardware.authsecret-V1-ndk.so
-VNDK-core: android.hardware.automotive.occupant_awareness-V1-ndk.so
-VNDK-core: android.hardware.bluetooth.audio-V1-ndk.so
 VNDK-core: android.hardware.configstore-utils.so
 VNDK-core: android.hardware.configstore@1.0.so
 VNDK-core: android.hardware.configstore@1.1.so
 VNDK-core: android.hardware.confirmationui-support-lib.so
-VNDK-core: android.hardware.dumpstate-V1-ndk.so
-VNDK-core: android.hardware.gnss-V1-ndk.so
 VNDK-core: android.hardware.graphics.allocator@2.0.so
 VNDK-core: android.hardware.graphics.allocator@3.0.so
 VNDK-core: android.hardware.graphics.allocator@4.0.so
 VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
 VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
-VNDK-core: android.hardware.health-V1-ndk.so
-VNDK-core: android.hardware.health.storage-V1-ndk.so
-VNDK-core: android.hardware.identity-V3-ndk.so
-VNDK-core: android.hardware.keymaster-V3-ndk.so
-VNDK-core: android.hardware.light-V1-ndk.so
 VNDK-core: android.hardware.media.bufferpool@2.0.so
 VNDK-core: android.hardware.media.omx@1.0.so
 VNDK-core: android.hardware.media@1.0.so
 VNDK-core: android.hardware.memtrack-V1-ndk.so
 VNDK-core: android.hardware.memtrack@1.0.so
-VNDK-core: android.hardware.nfc-V1-ndk.so
-VNDK-core: android.hardware.oemlock-V1-ndk.so
-VNDK-core: android.hardware.power-V2-ndk.so
-VNDK-core: android.hardware.power.stats-V1-ndk.so
-VNDK-core: android.hardware.radio-V1-ndk.so
-VNDK-core: android.hardware.radio.config-V1-ndk.so
-VNDK-core: android.hardware.radio.data-V1-ndk.so
-VNDK-core: android.hardware.radio.messaging-V1-ndk.so
-VNDK-core: android.hardware.radio.modem-V1-ndk.so
-VNDK-core: android.hardware.radio.network-V1-ndk.so
-VNDK-core: android.hardware.radio.sim-V1-ndk.so
-VNDK-core: android.hardware.radio.voice-V1-ndk.so
-VNDK-core: android.hardware.rebootescrow-V1-ndk.so
-VNDK-core: android.hardware.security.dice-V1-ndk.so
-VNDK-core: android.hardware.security.keymint-V1-ndk.so
-VNDK-core: android.hardware.security.secureclock-V1-ndk.so
-VNDK-core: android.hardware.security.sharedsecret-V1-ndk.so
 VNDK-core: android.hardware.soundtrigger@2.0-core.so
 VNDK-core: android.hardware.soundtrigger@2.0.so
-VNDK-core: android.hardware.vibrator-V2-ndk.so
-VNDK-core: android.hardware.weaver-V1-ndk.so
-VNDK-core: android.hardware.wifi.hostapd-V1-ndk.so
-VNDK-core: android.hardware.wifi.supplicant-V1-ndk.so
 VNDK-core: android.hidl.token@1.0-utils.so
 VNDK-core: android.hidl.token@1.0.so
-VNDK-core: android.media.audio.common.types-V1-ndk.so
-VNDK-core: android.system.keystore2-V1-ndk.so
 VNDK-core: android.system.suspend-V1-ndk.so
 VNDK-core: android.system.suspend@1.0.so
 VNDK-core: libaudioroute.so
@@ -168,7 +135,6 @@
 VNDK-core: libxml2.so
 VNDK-core: libyuv.so
 VNDK-core: libziparchive.so
-VNDK-private: libbacktrace.so
 VNDK-private: libblas.so
 VNDK-private: libcompiler_rt.so
 VNDK-private: libft2.so
@@ -200,7 +166,6 @@
 VNDK-product: android.hidl.token@1.0.so
 VNDK-product: android.system.suspend@1.0.so
 VNDK-product: libaudioutils.so
-VNDK-product: libbacktrace.so
 VNDK-product: libbase.so
 VNDK-product: libc++.so
 VNDK-product: libcamera_metadata.so
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 74501cd..3705a50 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -23,6 +23,8 @@
 # - Released GSI contains more VNDK packages to support old version vendors
 # - etc.
 #
+# See device/generic/common/README.md for more details.
+#
 
 BUILDING_GSI := true
 
@@ -62,13 +64,18 @@
     init.gsi.rc \
     init.vndk-nodef.rc \
 
+# Overlay the GSI specific SystemUI setting
+PRODUCT_PACKAGES += gsi_overlay_systemui
+PRODUCT_COPY_FILES += \
+    device/generic/common/overlays/overlay-config.xml:$(TARGET_COPY_OUT_SYSTEM_EXT)/overlay/config/config.xml
+
 # Support additional VNDK snapshots
 PRODUCT_EXTRA_VNDK_VERSIONS := \
-    28 \
     29 \
     30 \
     31 \
     32 \
+    33 \
 
 # Do not build non-GSI partition images.
 PRODUCT_BUILD_CACHE_IMAGE := false
diff --git a/target/product/handheld_product.mk b/target/product/handheld_product.mk
index 2199c57..8755ae6 100644
--- a/target/product/handheld_product.mk
+++ b/target/product/handheld_product.mk
@@ -30,7 +30,6 @@
     Gallery2 \
     LatinIME \
     Music \
-    OneTimeInitializer \
     preinstalled-packages-platform-handheld-product.xml \
     QuickSearchBox \
     SettingsIntelligence \
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index 3a59f6c..41233b2 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -34,7 +34,6 @@
 PRODUCT_PACKAGES += \
     BasicDreams \
     BlockedNumberProvider \
-    Bluetooth \
     BluetoothMidiService \
     BookmarkProvider \
     BuiltInPrintService \
diff --git a/target/product/iorap_large_memory_config.mk b/target/product/linux_bionic.mk
similarity index 79%
copy from target/product/iorap_large_memory_config.mk
copy to target/product/linux_bionic.mk
index 0c6c89a..da6b890 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/target/product/linux_bionic.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright (C) 2022 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,3 +12,7 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+
+PRODUCT_NAME := linux_bionic
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := linux_bionic
diff --git a/target/product/iorap_large_memory_config.mk b/target/product/module_arm64only.mk
similarity index 65%
copy from target/product/iorap_large_memory_config.mk
copy to target/product/module_arm64only.mk
index 0c6c89a..4e8d53e 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/target/product/module_arm64only.mk
@@ -1,4 +1,5 @@
-# Copyright (C) 2020 The Android Open Source Project
+#
+# Copyright (C) 2022 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,3 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+
+PRODUCT_NAME := module_arm64only
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := module_arm64only
diff --git a/target/product/iorap_large_memory_config.mk b/target/product/module_x86_64only.mk
similarity index 65%
copy from target/product/iorap_large_memory_config.mk
copy to target/product/module_x86_64only.mk
index 0c6c89a..bca4541 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/target/product/module_x86_64only.mk
@@ -1,4 +1,5 @@
-# Copyright (C) 2020 The Android Open Source Project
+#
+# Copyright (C) 2021 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,3 +13,10 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
+
+PRODUCT_NAME := module_x86_64only
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := module_x86_64only
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index b6560fc..a62cda7 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -157,3 +157,21 @@
     dalvik.vm.madvise.vdexfile.size=104857600 \
     dalvik.vm.madvise.odexfile.size=104857600 \
     dalvik.vm.madvise.artfile.size=4294967295
+
+# Properties for the Unspecialized App Process Pool
+PRODUCT_SYSTEM_PROPERTIES += \
+    dalvik.vm.usap_pool_enabled?=false \
+    dalvik.vm.usap_refill_threshold?=1 \
+    dalvik.vm.usap_pool_size_max?=3 \
+    dalvik.vm.usap_pool_size_min?=1 \
+    dalvik.vm.usap_pool_refill_delay_ms?=3000
+
+# Allow dexopt files that are side-effects of already allowlisted files.
+# This is only necessary when ART is prebuilt.
+ifeq (false,$(ART_MODULE_BUILD_FROM_SOURCE))
+  PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
+      system/framework/%.art \
+      system/framework/%.oat \
+      system/framework/%.odex \
+      system/framework/%.vdex
+endif
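Since the USAP pool properties above use the optional `?=` form, devices can tune the pool without patching this file; a sketch (values arbitrary):

```make
# In a device makefile: enable and enlarge the Unspecialized App
# Process pool; plain assignments win over the ?= defaults above.
PRODUCT_SYSTEM_PROPERTIES += \
    dalvik.vm.usap_pool_enabled=true \
    dalvik.vm.usap_pool_size_max=5
```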
diff --git a/target/product/sdk.mk b/target/product/sdk.mk
index 96d8cc9..fa7e1ad 100644
--- a/target/product/sdk.mk
+++ b/target/product/sdk.mk
@@ -14,8 +14,11 @@
 # limitations under the License.
 #
 
-# Don't modify this file - It's just an alias!
+# This is a simple product that configures the minimum amount
+# needed to build the SDK (without the emulator).
 
-$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_phone_armv7.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/languages_default.mk)
 
 PRODUCT_NAME := sdk
+PRODUCT_BRAND := Android
+PRODUCT_DEVICE := mainline_x86
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
index ad25a92..4bd8efc 100644
--- a/target/product/security/Android.mk
+++ b/target/product/security/Android.mk
@@ -1,43 +1,6 @@
 LOCAL_PATH:= $(call my-dir)
 
 #######################################
-# verity_key (installed to /, i.e. part of system.img)
-include $(CLEAR_VARS)
-
-LOCAL_MODULE := verity_key
-LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-LOCAL_LICENSE_CONDITIONS := notice
-LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-LOCAL_SRC_FILES := $(LOCAL_MODULE)
-LOCAL_MODULE_CLASS := ETC
-LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
-
-# For devices using a separate ramdisk, we need a copy there to establish the chain of trust.
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-LOCAL_REQUIRED_MODULES := verity_key_ramdisk
-endif
-
-include $(BUILD_PREBUILT)
-
-#######################################
-# verity_key (installed to ramdisk)
-#
-# Enabling the target when using system-as-root would cause build failure, as TARGET_RAMDISK_OUT
-# points to the same location as TARGET_ROOT_OUT.
-ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-  include $(CLEAR_VARS)
-  LOCAL_MODULE := verity_key_ramdisk
-  LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
-  LOCAL_LICENSE_CONDITIONS := notice
-  LOCAL_NOTICE_FILE := build/soong/licenses/LICENSE
-  LOCAL_MODULE_CLASS := ETC
-  LOCAL_SRC_FILES := verity_key
-  LOCAL_MODULE_STEM := verity_key
-  LOCAL_MODULE_PATH := $(TARGET_RAMDISK_OUT)
-  include $(BUILD_PREBUILT)
-endif
-
-#######################################
 # adb key, if configured via PRODUCT_ADB_KEYS
 ifdef PRODUCT_ADB_KEYS
   ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
diff --git a/target/product/security/bluetooth.pk8 b/target/product/security/bluetooth.pk8
new file mode 100644
index 0000000..c6ea434
--- /dev/null
+++ b/target/product/security/bluetooth.pk8
Binary files differ
diff --git a/target/product/security/bluetooth.x509.pem b/target/product/security/bluetooth.x509.pem
new file mode 100644
index 0000000..396d7c9
--- /dev/null
+++ b/target/product/security/bluetooth.x509.pem
@@ -0,0 +1,36 @@
+-----BEGIN CERTIFICATE-----
+MIIGOzCCBCOgAwIBAgIUEiZapaWZVSter06CJMf2kHi8PIswDQYJKoZIhvcNAQEL
+BQAwgasxCzAJBgNVBAYTAlVTMRMwEQYDVQQIDApDYWxpZm9ybmlhMRYwFAYDVQQH
+DA1Nb3VudGFpbiBWaWV3MRAwDgYDVQQKDAdBbmRyb2lkMRAwDgYDVQQLDAdBbmRy
+b2lkMScwJQYDVQQDDB5jb20uYW5kcm9pZC5ibHVldG9vdGguc2VydmljZXMxIjAg
+BgkqhkiG9w0BCQEWE2FuZHJvaWRAYW5kcm9pZC5jb20wIBcNMjIwMzE1MDAzNjAz
+WhgPNDc2MDAyMDkwMDM2MDNaMIGrMQswCQYDVQQGEwJVUzETMBEGA1UECAwKQ2Fs
+aWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4GA1UECgwHQW5kcm9p
+ZDEQMA4GA1UECwwHQW5kcm9pZDEnMCUGA1UEAwweY29tLmFuZHJvaWQuYmx1ZXRv
+b3RoLnNlcnZpY2VzMSIwIAYJKoZIhvcNAQkBFhNhbmRyb2lkQGFuZHJvaWQuY29t
+MIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAsVlq9pozUREGlb8u8Y0A
+fYwPs5OuavNx/EsX03aTjmAXUfSOMAewqzUXDIRjw8UQvOW63utaZ0go9osDPzNf
+VEftmGxW/AUC+HWGaLDQfCYO3ficPPOS7xpEhGZERNbnhvh5qX0NBt6mJygsfpOm
+RPThbi6Ig2Brxh1eqVYqRkTjhNFKD6gCd1PdMmUSF88xEYaZWvTkET89Zh38lLza
+2x/wfNZmCSAVurNw1Kf9NQfYsaGHwMsjrvTyhG93TTYXzRBFzAO2WlBiw6R0tQr8
+ZW5XCM9Yo6AS0KXiU0ZWwOXxhGdr38rNd7j9nZtpFwWmN1kgeb/vpEfq0Ylua9By
+uURnfJZu2K4TbFamuyjihItra2ZKOtFNPDeuggKMCkuZz6WU8FCoMEpnq5P2agxN
+OGAa7ynXdNzek98N3TGX8qtfEgCv6vyuM0gakJ6D9nM43nsCm1LkB/JA0CacWyRz
+ljaLL1C4S43azEOYyOOb94ITnkZCQGtH33kxzamyPLIZ37VF4+v6yTXySLBzOnhe
+Os5uBIDohVJuI838bLhZf8e5mIrnjiKwsmExXiQvgidbwvZKCz9n8YT4iUhWPx4F
+W+GPcivZsvsECcnJ2QURK1zhir5QuLS7ZbAth4kiEUxJ6ujF5jftE+L/ClK2LiY0
+2IXWRCct8J1hfJZZx8lm3PUCAwEAAaNTMFEwHQYDVR0OBBYEFO5CgtQzKbTEd/Q9
+rxK14a9BBwFZMB8GA1UdIwQYMBaAFO5CgtQzKbTEd/Q9rxK14a9BBwFZMA8GA1Ud
+EwEB/wQFMAMBAf8wDQYJKoZIhvcNAQELBQADggIBAGrGS1zmaoARVq7qhoY+xzSc
+1I/Tzf6vG6aHBC+CcIoSM2oqr6TGH+ADHAY6jhu/qzv1ij3gtoInAkBtkWvYsCIV
+eISPj8Qomcd8EIeW77p+ArKzS4HY5m1c/O4D/5rkl6c0exFq4Pdw9V8xyM98QtLd
+oj4xzzXUTPOIwkROHkj8otcML28m/MC0l/4b+flHnPqKFuLBjhxi9b/ZfwaXfjkx
+TcXpM3nPH8zN7kaJpS1fPW1IJyxJYvT022uK+afpezTmyS/50aOncUGjDJRw8CcO
+B88O8lpizDD3tD7P6jVOpRRJS4SnkVErbIn1xdWER6ubhnnycH7UmDVIx+vNd/t6
+YDa377au8Za+LnbDPfV1+Og+RaJSEIjJgfYyqnjBxGdRGN21VbqJdRzo/eO4ZFd2
+mGVtMosVr0jw4O8r60o9oMMWBTbFpxOI929QdcV+X1Lz8A8BZz0faXfZ2Z9usctu
+W2FtZge3tsJ07z7kuhNdbnm2yQVfd0FqiJsapUjlhgcdFVoDWPuqOfWAoG31ble6
+eiNnxfjiCckPWyciIE6lw97nvavGjlUacH5qVG86hOWU7xyBgeQ0PH4e+Nxr50yU
+A0GMxni1gefZFG8qEPdNRuDT1QdqDGh/8Ea11GEUMXdAxk0UzqyAtLDr6MbwK6lV
+mqmeueFdogdjvQ3mXe94
+-----END CERTIFICATE-----
diff --git a/target/product/security/verity.pk8 b/target/product/security/verity.pk8
deleted file mode 100644
index bebf216..0000000
--- a/target/product/security/verity.pk8
+++ /dev/null
Binary files differ
diff --git a/target/product/security/verity.x509.pem b/target/product/security/verity.x509.pem
deleted file mode 100644
index 86399c3..0000000
--- a/target/product/security/verity.x509.pem
+++ /dev/null
@@ -1,24 +0,0 @@
------BEGIN CERTIFICATE-----
-MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
-VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
-VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
-AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
-Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
-MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
-A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
-ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
-6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
-fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
-T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
-AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
-jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
-HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
-oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
-AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
-NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
-JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
-dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
-UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
-yttuAJAEAymk1mipd9+zp38=
------END CERTIFICATE-----
diff --git a/target/product/security/verity_key b/target/product/security/verity_key
deleted file mode 100644
index 31982d9..0000000
--- a/target/product/security/verity_key
+++ /dev/null
Binary files differ
diff --git a/target/product/verity.mk b/target/product/verity.mk
deleted file mode 100644
index 5f09283..0000000
--- a/target/product/verity.mk
+++ /dev/null
@@ -1,29 +0,0 @@
-#
-# Copyright (C) 2014 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Provides dependencies necessary for verified boot.
-
-PRODUCT_SUPPORTS_BOOT_SIGNER := true
-PRODUCT_SUPPORTS_VERITY := true
-PRODUCT_SUPPORTS_VERITY_FEC := true
-
-# The dev key is used to sign boot and recovery images, and the verity
-# metadata table. Actual product deliverables will be re-signed by hand.
-# We expect this file to exist with the suffixes ".x509.pem" and ".pk8".
-PRODUCT_VERITY_SIGNING_KEY := build/make/target/product/security/verity
-
-PRODUCT_PACKAGES += \
-        verity_key
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
index 18e08e4..716c8e0 100644
--- a/target/product/virtual_ab_ota/android_t_baseline.mk
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -38,15 +38,3 @@
 PRODUCT_PACKAGES += \
     snapuserd \
 
-# For dedicated recovery partitions, we need to include snapuserd
-# For GKI devices, BOARD_USES_RECOVERY_AS_BOOT is empty, but
-# so is BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT.
-ifdef BUILDING_RECOVERY_IMAGE
-ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-ifneq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true)
-PRODUCT_PACKAGES += \
-    snapuserd.recovery
-endif
-endif
-endif
-
diff --git a/target/product/iorap_large_memory_config.mk b/tests/artifact_path_requirements/inherit1.rbc
similarity index 63%
copy from target/product/iorap_large_memory_config.mk
copy to tests/artifact_path_requirements/inherit1.rbc
index 0c6c89a..dcef1bf 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/tests/artifact_path_requirements/inherit1.rbc
@@ -1,14 +1,21 @@
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#      http://www.apache.org/licenses/LICENSE-2.0
+#      https://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":inherit3.rbc", _inherit3_init = "init")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  rblf.inherit(handle, "test/inherit3", _inherit3_init)
diff --git a/tests/artifact_path_requirements/inherit2.rbc b/tests/artifact_path_requirements/inherit2.rbc
new file mode 100644
index 0000000..597b4e9
--- /dev/null
+++ b/tests/artifact_path_requirements/inherit2.rbc
@@ -0,0 +1,22 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":inherit4.rbc", _inherit4_init = "init")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  rblf.inherit(handle, "test/inherit4", _inherit4_init)
+  rblf.require_artifacts_in_path(handle, "vendor/", "")
diff --git a/tests/artifact_path_requirements/inherit3.rbc b/tests/artifact_path_requirements/inherit3.rbc
new file mode 100644
index 0000000..597b4e9
--- /dev/null
+++ b/tests/artifact_path_requirements/inherit3.rbc
@@ -0,0 +1,22 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":inherit4.rbc", _inherit4_init = "init")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  rblf.inherit(handle, "test/inherit4", _inherit4_init)
+  rblf.require_artifacts_in_path(handle, "vendor/", "")
diff --git a/target/product/iorap_large_memory_config.mk b/tests/artifact_path_requirements/inherit4.rbc
similarity index 61%
copy from target/product/iorap_large_memory_config.mk
copy to tests/artifact_path_requirements/inherit4.rbc
index 0c6c89a..52028fe 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/tests/artifact_path_requirements/inherit4.rbc
@@ -1,14 +1,21 @@
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#      http://www.apache.org/licenses/LICENSE-2.0
+#      https://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  rblf.setdefault(handle, "PRODUCT_COPY_FILES")
+  cfg["PRODUCT_COPY_FILES"] += ["foo/bar/baz.txt:vendor/etc/baz.txt"]
diff --git a/tests/artifact_path_requirements/product.rbc b/tests/artifact_path_requirements/product.rbc
new file mode 100644
index 0000000..7d1f169
--- /dev/null
+++ b/tests/artifact_path_requirements/product.rbc
@@ -0,0 +1,24 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":inherit1.rbc", _inherit1_init = "init")
+load(":inherit2.rbc", _inherit2_init = "init")
+load(":inherit3.rbc", _inherit3_init = "init")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+  rblf.inherit(handle, "test/inherit1", _inherit1_init)
+  rblf.inherit(handle, "test/inherit2", _inherit2_init)
+  rblf.inherit(handle, "test/inherit3", _inherit3_init)
diff --git a/tests/artifact_path_requirements/test.rbc b/tests/artifact_path_requirements/test.rbc
new file mode 100644
index 0000000..0a344d1
--- /dev/null
+++ b/tests/artifact_path_requirements/test.rbc
@@ -0,0 +1,27 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load("//build/make/tests/input_variables.rbc", input_variables_init = "init")
+load(":product.rbc", "init")
+
+def assert_eq(expected, actual):
+    if expected != actual:
+        fail("Expected '%s', got '%s'" % (expected, actual))
+
+def test():
+    (globals, globals_base) = rblf.product_configuration("test/product", init, input_variables_init)
+    assert_eq(["foo/bar/baz.txt:vendor/etc/baz.txt"], globals["PRODUCTS.test/product.mk.PRODUCT_COPY_FILES"])
+    assert_eq(["foo/bar/baz.txt:vendor/etc/baz.txt"], globals["PRODUCTS.test/inherit2.mk.PRODUCT_COPY_FILES"])
+    assert_eq(["foo/bar/baz.txt:vendor/etc/baz.txt"], globals["PRODUCTS.test/inherit3.mk.PRODUCT_COPY_FILES"])
diff --git a/tests/b_tests.sh b/tests/b_tests.sh
new file mode 100755
index 0000000..45cb4f7
--- /dev/null
+++ b/tests/b_tests.sh
@@ -0,0 +1,36 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# These commands are expected to always return successfully
+
+trap 'exit 1' ERR
+
+source $(dirname $0)/../envsetup.sh
+
+# lunch required to set up PATH to use b
+lunch aosp_arm64
+
+test_target=//build/bazel/scripts/difftool:difftool
+
+b build "$test_target"
+b build "$test_target" --run-soong-tests
+b build --run-soong-tests "$test_target"
+b --run-soong-tests build "$test_target"
+b cquery 'kind(test, //build/bazel/examples/android_app/...)' --config=android
+b run $test_target -- --help >/dev/null
+
+# Workflow tests for bmod
+bmod libm
+b run $(bmod fastboot) -- help
+b build $(bmod libm) $(bmod libcutils) --config=android
diff --git a/tests/envsetup_tests.sh b/tests/envsetup_tests.sh
index abdcd56..6b41766 100755
--- a/tests/envsetup_tests.sh
+++ b/tests/envsetup_tests.sh
@@ -1,37 +1,22 @@
 #!/bin/bash -e
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
 
-source $(dirname $0)/../envsetup.sh
-
-unset TARGET_PRODUCT TARGET_BUILD_VARIANT TARGET_PLATFORM_VERSION
-
-function check_lunch
-(
-    echo lunch $1
-    set +e
-    lunch $1 > /dev/null 2> /dev/null
-    set -e
-    [ "$TARGET_PRODUCT" = "$2" ] || ( echo "lunch $1: expected TARGET_PRODUCT='$2', got '$TARGET_PRODUCT'" && exit 1 )
-    [ "$TARGET_BUILD_VARIANT" = "$3" ] || ( echo "lunch $1: expected TARGET_BUILD_VARIANT='$3', got '$TARGET_BUILD_VARIANT'" && exit 1 )
-    [ "$TARGET_PLATFORM_VERSION" = "$4" ] || ( echo "lunch $1: expected TARGET_PLATFORM_VERSION='$4', got '$TARGET_PLATFORM_VERSION'" && exit 1 )
+tests=(
+ $(dirname $0)/lunch_tests.sh
 )
 
-default_version=$(get_build_var DEFAULT_PLATFORM_VERSION)
-valid_version=PPR1
-
-# lunch tests
-check_lunch "aosp_arm64"                                "aosp_arm64" "eng"       ""
-check_lunch "aosp_arm64-userdebug"                      "aosp_arm64" "userdebug" ""
-check_lunch "aosp_arm64-userdebug-$default_version"     "aosp_arm64" "userdebug" "$default_version"
-check_lunch "aosp_arm64-userdebug-$valid_version"       "aosp_arm64" "userdebug" "$valid_version"
-check_lunch "abc"                                       "" "" ""
-check_lunch "aosp_arm64-abc"                            "" "" ""
-check_lunch "aosp_arm64-userdebug-abc"                  "" "" ""
-check_lunch "aosp_arm64-abc-$valid_version"             "" "" ""
-check_lunch "abc-userdebug-$valid_version"              "" "" ""
-check_lunch "-"                                         "" "" ""
-check_lunch "--"                                        "" "" ""
-check_lunch "-userdebug"                                "" "" ""
-check_lunch "-userdebug-"                               "" "" ""
-check_lunch "-userdebug-$valid_version"                 "" "" ""
-check_lunch "aosp_arm64-userdebug-$valid_version-"      "" "" ""
-check_lunch "aosp_arm64-userdebug-$valid_version-abc"   "" "" ""
+for test in "${tests[@]}"; do
+  bash -x $test
+done
diff --git a/tests/lunch_tests.sh b/tests/lunch_tests.sh
new file mode 100755
index 0000000..4285d13
--- /dev/null
+++ b/tests/lunch_tests.sh
@@ -0,0 +1,48 @@
+#!/usr/bin/env bash
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+source $(dirname $0)/../envsetup.sh
+
+unset TARGET_PRODUCT TARGET_BUILD_VARIANT TARGET_PLATFORM_VERSION
+
+function check_lunch
+(
+    echo lunch $1
+    set +e
+    lunch $1 > /dev/null 2> /dev/null
+    set -e
+    [ "$TARGET_PRODUCT" = "$2" ] || ( echo "lunch $1: expected TARGET_PRODUCT='$2', got '$TARGET_PRODUCT'" && exit 1 )
+    [ "$TARGET_BUILD_VARIANT" = "$3" ] || ( echo "lunch $1: expected TARGET_BUILD_VARIANT='$3', got '$TARGET_BUILD_VARIANT'" && exit 1 )
+    [ "$TARGET_PLATFORM_VERSION" = "$4" ] || ( echo "lunch $1: expected TARGET_PLATFORM_VERSION='$4', got '$TARGET_PLATFORM_VERSION'" && exit 1 )
+)
+
+default_version=$(get_build_var DEFAULT_PLATFORM_VERSION)
+
+# lunch tests
+check_lunch "aosp_arm64"                                "aosp_arm64" "eng"       ""
+check_lunch "aosp_arm64-userdebug"                      "aosp_arm64" "userdebug" ""
+check_lunch "aosp_arm64-userdebug-$default_version"     "aosp_arm64" "userdebug" "$default_version"
+check_lunch "abc"                                       "" "" ""
+check_lunch "aosp_arm64-abc"                            "" "" ""
+check_lunch "aosp_arm64-userdebug-abc"                  "" "" ""
+check_lunch "aosp_arm64-abc-$default_version"             "" "" ""
+check_lunch "abc-userdebug-$default_version"              "" "" ""
+check_lunch "-"                                         "" "" ""
+check_lunch "--"                                        "" "" ""
+check_lunch "-userdebug"                                "" "" ""
+check_lunch "-userdebug-"                               "" "" ""
+check_lunch "-userdebug-$default_version"                 "" "" ""
+check_lunch "aosp_arm64-userdebug-$default_version-"      "" "" ""
+check_lunch "aosp_arm64-userdebug-$default_version-abc"   "" "" ""
diff --git a/target/product/iorap_large_memory_config.mk b/tests/prefixed_sort_order/base-secondary.rbc
similarity index 67%
copy from target/product/iorap_large_memory_config.mk
copy to tests/prefixed_sort_order/base-secondary.rbc
index 0c6c89a..5446e8f 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/tests/prefixed_sort_order/base-secondary.rbc
@@ -1,14 +1,21 @@
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#      http://www.apache.org/licenses/LICENSE-2.0
+#      https://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  g.setdefault("MY_VAR", [])
+  g["MY_VAR"] += ["foo"]
diff --git a/target/product/iorap_large_memory_config.mk b/tests/prefixed_sort_order/base.rbc
similarity index 67%
copy from target/product/iorap_large_memory_config.mk
copy to tests/prefixed_sort_order/base.rbc
index 0c6c89a..05b0d5d 100644
--- a/target/product/iorap_large_memory_config.mk
+++ b/tests/prefixed_sort_order/base.rbc
@@ -1,14 +1,21 @@
-# Copyright (C) 2020 The Android Open Source Project
+# Copyright 2022 Google LLC
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#      http://www.apache.org/licenses/LICENSE-2.0
+#      https://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  g.setdefault("MY_VAR", [])
+  g["MY_VAR"] += ["bar"]
diff --git a/tests/prefixed_sort_order/product.rbc b/tests/prefixed_sort_order/product.rbc
new file mode 100644
index 0000000..619b2c0
--- /dev/null
+++ b/tests/prefixed_sort_order/product.rbc
@@ -0,0 +1,29 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":base.rbc", _base_init = "init")
+load(":base-secondary.rbc", _base_secondary_init = "init")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  # It's important that base-secondary uses a dash; an underscore won't expose the sort order issue:
+  # >>> sorted(["base", "base-secondary"])
+  # ['base', 'base-secondary']
+  # >>> sorted(["base.mk", "base-secondary.mk"])
+  # ['base-secondary.mk', 'base.mk']
+
+  rblf.inherit(handle, "base", _base_init)
+  rblf.inherit(handle, "base-secondary", _base_secondary_init)
diff --git a/tests/prefixed_sort_order/test.rbc b/tests/prefixed_sort_order/test.rbc
new file mode 100644
index 0000000..e59a509
--- /dev/null
+++ b/tests/prefixed_sort_order/test.rbc
@@ -0,0 +1,26 @@
+# Copyright 2022 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load("//build/make/tests/input_variables.rbc", input_variables_init = "init")
+load(":product.rbc", "init")
+
+
+def assert_eq(expected, actual):
+    if expected != actual:
+        fail("Expected '%s', got '%s'" % (expected, actual))
+
+def test():
+    (globals, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
+    assert_eq(["foo", "bar"], globals["MY_VAR"])
diff --git a/target/product/iorap_large_memory_config.mk b/tests/roboleaf_tests.sh
old mode 100644
new mode 100755
similarity index 70%
copy from target/product/iorap_large_memory_config.mk
copy to tests/roboleaf_tests.sh
index 0c6c89a..2d13766
--- a/target/product/iorap_large_memory_config.mk
+++ b/tests/roboleaf_tests.sh
@@ -1,14 +1,22 @@
-# Copyright (C) 2020 The Android Open Source Project
+#!/bin/bash -e
+# Copyright (C) 2022 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
 # You may obtain a copy of the License at
 #
-#      http://www.apache.org/licenses/LICENSE-2.0
+#     http://www.apache.org/licenses/LICENSE-2.0
 #
 # Unless required by applicable law or agreed to in writing, software
 # distributed under the License is distributed on an "AS IS" BASIS,
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#
+
+tests=(
+ $(dirname $0)/b_tests.sh
+)
+
+for test in "${tests[@]}"; do
+  bash -x $test
+done
diff --git a/tests/run.rbc b/tests/run.rbc
index a9b1673..c6dfeba 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -26,15 +26,24 @@
 load(":board.rbc", board_init = "init")
 load(":board_input_vars.rbc", board_input_vars_init = "init")
 load("//build/make/tests/single_value_inheritance:test.rbc", test_single_value_inheritance = "test")
+load("//build/make/tests/artifact_path_requirements:test.rbc", test_artifact_path_requirements = "test")
+load("//build/make/tests/prefixed_sort_order:test.rbc", test_prefixed_sort_order = "test")
 
 def assert_eq(expected, actual):
     if expected != actual:
         fail("Expected '%s', got '%s'" % (expected, actual))
 
+def assert_dict_subset(expected, actual):
+    for key, val in expected.items():
+        assert_eq(val, actual[key])
+
 # Unit tests for non-trivial runtime functions
+assert_eq(["a", "b", "c"], rblf.mksort("b a    c c"))
+assert_eq(["a", "b", "c"], rblf.mksort(["b", "a", "c", "c"]))
+
 assert_eq("", rblf.mkstrip(" \n \t    "))
 assert_eq("a b c", rblf.mkstrip("  a b   \n  c \t"))
-assert_eq(1, rblf.mkstrip(1))
+assert_eq("1", rblf.mkstrip("1 "))
 
 assert_eq("b1 b2", rblf.mksubst("a", "b", "a1 a2"))
 assert_eq(["b1", "x2"], rblf.mksubst("a", "b", ["a1", "x2"]))
@@ -53,13 +62,38 @@
 assert_eq(["from/a:to/a", "from/b:to/b"], rblf.product_copy_files_by_pattern("from/%", "to/%", "a b"))
 
 assert_eq([], rblf.filter(["a", "", "b"], "f"))
-assert_eq(["", "b"], rblf.filter_out(["a", "" ], ["a", "", "b"] ))
+assert_eq(["ab%c", "axyzb%c"], rblf.filter(["a%b%c"], ["ab%c", "axyzb%c", "axyzb%cd", "axyzbwc"]))
+assert_eq(["abc", "bcd"], rblf.filter(["a%", "b%"], ["abc", "def", "bcd", "xabc"]))
+assert_eq(["b", "ab"], rblf.filter_out(["a", "" ], ["a", "", "b", "ab"]))
+assert_eq(["c"], rblf.filter_out(["a", "b" ], ["a", "b", "c"]))
+assert_eq(["c"], rblf.filter_out(["a%", "b" ], ["abc", "b", "c"]))
 
 assert_eq("foo.c no_folder", rblf.notdir(["src/foo.c", "no_folder"]))
 assert_eq("foo.c no_folder", rblf.notdir("src/foo.c no_folder"))
 assert_eq("", rblf.notdir("/"))
 assert_eq("", rblf.notdir(""))
 
+cwd = rblf_shell('pwd')
+assert_eq(cwd+"/foo/bar", rblf.abspath("foo/bar"))
+assert_eq(cwd+"/bar", rblf.abspath("foo/.././bar"))
+assert_eq(cwd+"/bar", rblf.abspath("foo/..////bar//"))
+assert_eq("/foo/baz", rblf.abspath("/foo/bar/../baz"))
+assert_eq(cwd+"/foo/bar "+cwd+"/foo/baz", rblf.abspath("foo/bar foo/baz"))
+assert_eq("/baz", rblf.abspath("/../../../../../../../../../../../../../../../../baz"))
+
+assert_eq("foo", rblf.first_word("foo bar"))
+assert_eq("foo", rblf.first_word(["foo", "bar"]))
+assert_eq("", rblf.first_word(""))
+assert_eq("", rblf.first_word([]))
+assert_eq("bar", rblf.last_word("foo bar"))
+assert_eq("bar", rblf.last_word(["foo", "bar"]))
+assert_eq("", rblf.last_word(""))
+assert_eq("", rblf.last_word([]))
+
+assert_eq(["foo", "bar"], rblf.flatten_2d_list([["foo", "bar"]]))
+assert_eq(["foo", "bar"], rblf.flatten_2d_list([["foo"], ["bar"]]))
+assert_eq([], rblf.flatten_2d_list([]))
+
 assert_eq(
     ["build/make/tests/board.rbc", "build/make/tests/board_input_vars.rbc"],
     rblf.expand_wildcard("build/make/tests/board*.rbc")
@@ -73,31 +107,28 @@
     rblf.expand_wildcard("build/make/tests/run.rbc build/make/tests/nonexistent.rbc")
 )
 
-(globals, config, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
-assert_eq(
-    {
-      "PRODUCT_COPY_FILES": [
-          "part_from:part_to",
-          "device_from:device_to",
-          "device/google/redfin/audio/audio_platform_info_noextcodec_snd.xml:||VENDOR-PATH-PH||/etc/audio/audio_platform_info_noextcodec_snd.xml",
-          "xyz:/etc/xyz",
-          "x.xml:/etc/x.xml",
-          "y.xml:/etc/y.xml",
-          "from/sub/x:to/x",
-          "from/sub/y:to/y",
-      ],
-      "PRODUCT_HOST_PACKAGES": ["host"],
-      "PRODUCT_PACKAGES": [
-          "dev",
-          "inc",
-          "dev_after",
-          "board1_in",
-          "board1_is",
-      ],
-      "PRODUCT_PRODUCT_PROPERTIES": ["part_properties"]
-    },
-    { k:v for k, v in sorted(config.items()) }
-)
+(globals, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
+assert_dict_subset({
+    "PRODUCTS.test/device.mk.PRODUCT_COPY_FILES": [
+        "part_from:part_to",
+        "device_from:device_to",
+        "device/google/redfin/audio/audio_platform_info_noextcodec_snd.xml:||VENDOR-PATH-PH||/etc/audio/audio_platform_info_noextcodec_snd.xml",
+        "xyz:/etc/xyz",
+        "x.xml:/etc/x.xml",
+        "y.xml:/etc/y.xml",
+        "from/sub/x:to/x",
+        "from/sub/y:to/y",
+    ],
+    "PRODUCTS.test/device.mk.PRODUCT_HOST_PACKAGES": ["host"],
+    "PRODUCTS.test/device.mk.PRODUCT_PACKAGES": [
+        "dev",
+        "inc",
+        "dev_after",
+        "board1_in",
+        "board1_is",
+    ],
+    "PRODUCTS.test/device.mk.PRODUCT_PRODUCT_PROPERTIES": ["part_properties"]
+}, globals)
 
 ns = globals["$SOONG_CONFIG_NAMESPACES"]
 assert_eq(
@@ -127,8 +158,10 @@
     { k:v for k,v in sorted(goals.items()) }
 )
 
-(board_globals, board_config, board_globals_base) = rblf.board_configuration(board_init, board_input_vars_init)
+(board_globals, board_globals_base) = rblf.board_configuration(board_init, board_input_vars_init)
 assert_eq({"A_LIST_VARIABLE": ["foo", "bar"]}, board_globals)
 assert_eq({"A_LIST_VARIABLE": ["foo"]}, board_globals_base)
 
 test_single_value_inheritance()
+test_artifact_path_requirements()
+test_prefixed_sort_order()
diff --git a/tests/single_value_inheritance/inherit1.rbc b/tests/single_value_inheritance/inherit1.rbc
index b71ffb3..0cc98a9 100644
--- a/tests/single_value_inheritance/inherit1.rbc
+++ b/tests/single_value_inheritance/inherit1.rbc
@@ -19,3 +19,5 @@
 
   cfg["PRODUCT_CHARACTERISTICS"] = "tablet"
   cfg["PRODUCT_DEFAULT_DEV_CERTIFICATE"] = "vendor/myvendor/certs/devkeys/devkey"
+  cfg.setdefault("PRODUCT_PACKAGES", [])
+  cfg["PRODUCT_PACKAGES"] += ["bar"]
diff --git a/tests/single_value_inheritance/inherit2.rbc b/tests/single_value_inheritance/inherit2.rbc
index be85866..ed5e569 100644
--- a/tests/single_value_inheritance/inherit2.rbc
+++ b/tests/single_value_inheritance/inherit2.rbc
@@ -18,3 +18,5 @@
   cfg = rblf.cfg(handle)
 
   cfg["PRODUCT_CHARACTERISTICS"] = "nosdcard"
+  cfg.setdefault("PRODUCT_PACKAGES", [])
+  cfg["PRODUCT_PACKAGES"] += ["foo"]
diff --git a/tests/single_value_inheritance/test.rbc b/tests/single_value_inheritance/test.rbc
index 07a5e65..e4f44f4 100644
--- a/tests/single_value_inheritance/test.rbc
+++ b/tests/single_value_inheritance/test.rbc
@@ -22,6 +22,7 @@
         fail("Expected '%s', got '%s'" % (expected, actual))
 
 def test():
-    (globals, config, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
-    assert_eq("tablet", config["PRODUCT_CHARACTERISTICS"])
-    assert_eq("vendor/myvendor/certs/devkeys/devkey", config["PRODUCT_DEFAULT_DEV_CERTIFICATE"])
+    (globals, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
+    assert_eq("tablet", globals["PRODUCTS.test/device.mk.PRODUCT_CHARACTERISTICS"])
+    assert_eq("vendor/myvendor/certs/devkeys/devkey", globals["PRODUCTS.test/device.mk.PRODUCT_DEFAULT_DEV_CERTIFICATE"])
+    assert_eq(["foo", "bar"], globals["PRODUCTS.test/device.mk.PRODUCT_PACKAGES"])
diff --git a/tools/Android.bp b/tools/Android.bp
index 6601c60..1f0d406 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -49,3 +49,18 @@
   out: ["kernel_release.txt"],
   cmd: "$(location) --tools lz4:$(location lz4) --input $(in) --output-release > $(out)"
 }
+
+cc_binary_host {
+  name: "build-runfiles",
+  srcs: ["build-runfiles.cc"],
+}
+
+python_binary_host {
+  name: "check_radio_versions",
+  srcs: ["check_radio_versions.py"],
+}
+
+python_binary_host {
+  name: "check_elf_file",
+  srcs: ["check_elf_file.py"],
+}
diff --git a/tools/BUILD.bazel b/tools/BUILD.bazel
index 3170820..0de178b 100644
--- a/tools/BUILD.bazel
+++ b/tools/BUILD.bazel
@@ -1,20 +1,27 @@
 py_library(
-    name="event_log_tags",
+    name = "event_log_tags",
     srcs = ["event_log_tags.py"],
 )
 
 py_binary(
-    name="java-event-log-tags",
-    srcs=["java-event-log-tags.py"],
-    deps=[":event_log_tags"],
-    visibility = ["//visibility:public"],
+    name = "java-event-log-tags",
+    srcs = ["java-event-log-tags.py"],
     python_version = "PY3",
+    visibility = ["//visibility:public"],
+    deps = [":event_log_tags"],
 )
 
 py_binary(
-    name="merge-event-log-tags",
-    srcs=["merge-event-log-tags.py"],
-    deps=[":event_log_tags"],
-    visibility = ["//visibility:public"],
+    name = "merge-event-log-tags",
+    srcs = ["merge-event-log-tags.py"],
     python_version = "PY3",
+    visibility = ["//visibility:public"],
+    deps = [":event_log_tags"],
+)
+
+py_binary(
+    name = "check_elf_file",
+    srcs = ["check_elf_file.py"],
+    python_version = "PY3",
+    visibility = ["//visibility:public"],
 )
diff --git a/tools/build-runfiles.cc b/tools/build-runfiles.cc
new file mode 100644
index 0000000..b6197f0
--- /dev/null
+++ b/tools/build-runfiles.cc
@@ -0,0 +1,426 @@
+// Copyright 2014 The Bazel Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// This program creates a "runfiles tree" from a "runfiles manifest".
+//
+// The command line arguments are an input manifest INPUT and an output
+// directory RUNFILES. First, the files in the RUNFILES directory are scanned
+// and any extraneous ones are removed. Second, any missing files are created.
+// Finally, a copy of the input manifest is written to RUNFILES/MANIFEST.
+//
+// The input manifest consists of lines, each containing a relative path within
+// the runfiles, a space, and an optional absolute path.  If this second path
+// is present, a symlink is created pointing to it; otherwise an empty file is
+// created.
+//
+// Given the line
+//   <workspace root>/output/path /real/path
+// we will create directories
+//   RUNFILES/<workspace root>
+//   RUNFILES/<workspace root>/output
+// a symlink
+//   RUNFILES/<workspace root>/output/path -> /real/path
+// and the output manifest will contain a line
+//   <workspace root>/output/path /real/path
+//
+// If --use_metadata is supplied, every other line is treated as opaque
+// metadata, and is ignored here.
+//
+// All output paths must be relative and generally (but not always) begin with
+// <workspace root>. No output path may be equal to another.  No output path may
+// be a path prefix of another.
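+//
+// For example, with --use_metadata, a hypothetical four-line manifest (the
+// metadata values here are invented; they are opaque to this program) might
+// look like:
+//   <workspace root>/output/path /real/path
+//   deadbeef0123 4096
+//   <workspace root>/empty.txt
+//   cafef00d4567 0
+// Lines 1 and 3 are entries (the second creates an empty file since it has no
+// target); lines 2 and 4 are metadata and are skipped here.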
+
+#define _FILE_OFFSET_BITS 64
+
+#include <dirent.h>
+#include <err.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <limits.h>
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <sys/stat.h>
+#include <unistd.h>
+
+#include <map>
+#include <string>
+
+// program_invocation_short_name is not portable.
+static const char *argv0;
+
+const char *input_filename;
+const char *output_base_dir;
+
+enum FileType {
+  FILE_TYPE_REGULAR,
+  FILE_TYPE_DIRECTORY,
+  FILE_TYPE_SYMLINK
+};
+
+struct FileInfo {
+  FileType type;
+  std::string symlink_target;
+
+  bool operator==(const FileInfo &other) const {
+    return type == other.type && symlink_target == other.symlink_target;
+  }
+
+  bool operator!=(const FileInfo &other) const {
+    return !(*this == other);
+  }
+};
+
+typedef std::map<std::string, FileInfo> FileInfoMap;
+
+class RunfilesCreator {
+ public:
+  explicit RunfilesCreator(const std::string &output_base)
+      : output_base_(output_base),
+        output_filename_("MANIFEST"),
+        temp_filename_(output_filename_ + ".tmp") {
+    SetupOutputBase();
+    if (chdir(output_base_.c_str()) != 0) {
+      err(2, "chdir '%s'", output_base_.c_str());
+    }
+  }
+
+  void ReadManifest(const std::string &manifest_file, bool allow_relative,
+                    bool use_metadata) {
+    FILE *outfile = fopen(temp_filename_.c_str(), "w");
+    if (!outfile) {
+      err(2, "opening '%s/%s' for writing", output_base_.c_str(),
+           temp_filename_.c_str());
+    }
+    FILE *infile = fopen(manifest_file.c_str(), "r");
+    if (!infile) {
+      err(2, "opening '%s' for reading", manifest_file.c_str());
+    }
+
+    // read input manifest
+    int lineno = 0;
+    char buf[3 * PATH_MAX];
+    while (fgets(buf, sizeof buf, infile)) {
+      // copy line to output manifest
+      if (fputs(buf, outfile) == EOF) {
+        err(2, "writing to '%s/%s'", output_base_.c_str(),
+             temp_filename_.c_str());
+      }
+
+      // parse line
+      ++lineno;
+      // Skip metadata lines. They are used solely for
+      // dependency checking.
+      if (use_metadata && lineno % 2 == 0) continue;
+
+      char *tok = strtok(buf, " \n");
+      if (tok == nullptr) {
+        continue;
+      } else if (*tok == '/') {
+        errx(2, "%s:%d: paths must not be absolute", input_filename, lineno);
+      }
+      std::string link(tok);
+
+      const char *target = strtok(nullptr, " \n");
+      if (target == nullptr) {
+        target = "";
+      } else if (strtok(nullptr, " \n") != nullptr) {
+        errx(2, "%s:%d: link or target filename contains space", input_filename, lineno);
+      } else if (!allow_relative && target[0] != '/') {
+        errx(2, "%s:%d: expected absolute path", input_filename, lineno);
+      }
+
+      FileInfo *info = &manifest_[link];
+      if (target[0] == '\0') {
+        // No target means an empty file.
+        info->type = FILE_TYPE_REGULAR;
+      } else {
+        info->type = FILE_TYPE_SYMLINK;
+        info->symlink_target = target;
+      }
+
+      FileInfo parent_info;
+      parent_info.type = FILE_TYPE_DIRECTORY;
+
+      while (true) {
+        int k = link.rfind('/');
+        if (k < 0) break;
+        link.erase(k, std::string::npos);
+        if (!manifest_.insert(std::make_pair(link, parent_info)).second) break;
+      }
+    }
+    if (fclose(outfile) != 0) {
+      err(2, "writing to '%s/%s'", output_base_.c_str(),
+           temp_filename_.c_str());
+    }
+    fclose(infile);
+
+    // Don't delete the temp manifest file.
+    manifest_[temp_filename_].type = FILE_TYPE_REGULAR;
+  }
+
+  void CreateRunfiles() {
+    if (unlink(output_filename_.c_str()) != 0 && errno != ENOENT) {
+      err(2, "removing previous file at '%s/%s'", output_base_.c_str(),
+           output_filename_.c_str());
+    }
+
+    ScanTreeAndPrune(".");
+    CreateFiles();
+
+    // rename output file into place
+    if (rename(temp_filename_.c_str(), output_filename_.c_str()) != 0) {
+      err(2, "renaming '%s/%s' to '%s/%s'",
+           output_base_.c_str(), temp_filename_.c_str(),
+           output_base_.c_str(), output_filename_.c_str());
+    }
+  }
+
+ private:
+  void SetupOutputBase() {
+    struct stat st;
+    if (stat(output_base_.c_str(), &st) != 0) {
+      // Technically, this will cause problems if the user's umask contains
+      // 0200, but we don't care. Anyone who does that deserves what's coming.
+      if (mkdir(output_base_.c_str(), 0777) != 0) {
+        err(2, "creating directory '%s'", output_base_.c_str());
+      }
+    } else {
+      EnsureDirReadAndWritePerms(output_base_);
+    }
+  }
+
+  void ScanTreeAndPrune(const std::string &path) {
+    // A note on non-empty files:
+    // We don't distinguish between empty and non-empty files. That is, if
+    // there's a file that has contents, we don't truncate it here, even though
+    // the manifest supports creation of empty files, only. Given that
+    // .runfiles are *supposed* to be immutable, this shouldn't be a problem.
+    EnsureDirReadAndWritePerms(path);
+
+    struct dirent *entry;
+    DIR *dh = opendir(path.c_str());
+    if (!dh) {
+      err(2, "opendir '%s'", path.c_str());
+    }
+
+    errno = 0;
+    const std::string prefix = (path == "." ? "" : path + "/");
+    while ((entry = readdir(dh)) != nullptr) {
+      if (!strcmp(entry->d_name, ".") || !strcmp(entry->d_name, "..")) continue;
+
+      std::string entry_path = prefix + entry->d_name;
+      FileInfo actual_info;
+      actual_info.type = DentryToFileType(entry_path, entry);
+
+      if (actual_info.type == FILE_TYPE_SYMLINK) {
+        ReadLinkOrDie(entry_path, &actual_info.symlink_target);
+      }
+
+      FileInfoMap::iterator expected_it = manifest_.find(entry_path);
+      if (expected_it == manifest_.end() ||
+          expected_it->second != actual_info) {
+        DelTree(entry_path, actual_info.type);
+      } else {
+        manifest_.erase(expected_it);
+        if (actual_info.type == FILE_TYPE_DIRECTORY) {
+          ScanTreeAndPrune(entry_path);
+        }
+      }
+
+      errno = 0;
+    }
+    if (errno != 0) {
+      err(2, "reading directory '%s'", path.c_str());
+    }
+    closedir(dh);
+  }
+
+  void CreateFiles() {
+    for (FileInfoMap::const_iterator it = manifest_.begin();
+         it != manifest_.end(); ++it) {
+      const std::string &path = it->first;
+      switch (it->second.type) {
+        case FILE_TYPE_DIRECTORY:
+          if (mkdir(path.c_str(), 0777) != 0) {
+            err(2, "mkdir '%s'", path.c_str());
+          }
+          break;
+        case FILE_TYPE_REGULAR:
+          {
+            int fd = open(path.c_str(), O_CREAT|O_EXCL|O_WRONLY, 0555);
+            if (fd < 0) {
+              err(2, "creating empty file '%s'", path.c_str());
+            }
+            close(fd);
+          }
+          break;
+        case FILE_TYPE_SYMLINK:
+          {
+            const std::string& target = it->second.symlink_target;
+            if (symlink(target.c_str(), path.c_str()) != 0) {
+              err(2, "symlinking '%s' -> '%s'", path.c_str(), target.c_str());
+            }
+          }
+          break;
+      }
+    }
+  }
+
+  FileType DentryToFileType(const std::string &path, struct dirent *ent) {
+#ifdef _DIRENT_HAVE_D_TYPE
+    if (ent->d_type != DT_UNKNOWN) {
+      if (ent->d_type == DT_DIR) {
+        return FILE_TYPE_DIRECTORY;
+      } else if (ent->d_type == DT_LNK) {
+        return FILE_TYPE_SYMLINK;
+      } else {
+        return FILE_TYPE_REGULAR;
+      }
+    } else  // NOLINT (the brace is in the next line)
+#endif
+    {
+      struct stat st;
+      LStatOrDie(path, &st);
+      if (S_ISDIR(st.st_mode)) {
+        return FILE_TYPE_DIRECTORY;
+      } else if (S_ISLNK(st.st_mode)) {
+        return FILE_TYPE_SYMLINK;
+      } else {
+        return FILE_TYPE_REGULAR;
+      }
+    }
+  }
+
+  void LStatOrDie(const std::string &path, struct stat *st) {
+    if (lstat(path.c_str(), st) != 0) {
+      err(2, "lstating file '%s'", path.c_str());
+    }
+  }
+
+  void StatOrDie(const std::string &path, struct stat *st) {
+    if (stat(path.c_str(), st) != 0) {
+      err(2, "stating file '%s'", path.c_str());
+    }
+  }
+
+  void ReadLinkOrDie(const std::string &path, std::string *output) {
+    char readlink_buffer[PATH_MAX];
+    int sz = readlink(path.c_str(), readlink_buffer, sizeof(readlink_buffer));
+    if (sz < 0) {
+      err(2, "reading symlink '%s'", path.c_str());
+    }
+    // readlink returns a non-null-terminated string.
+    std::string(readlink_buffer, sz).swap(*output);
+  }
+
+  void EnsureDirReadAndWritePerms(const std::string &path) {
+    const int kMode = 0700;
+    struct stat st;
+    LStatOrDie(path, &st);
+    if ((st.st_mode & kMode) != kMode) {
+      int new_mode = st.st_mode | kMode;
+      if (chmod(path.c_str(), new_mode) != 0) {
+        err(2, "chmod '%s'", path.c_str());
+      }
+    }
+  }
+
+  bool DelTree(const std::string &path, FileType file_type) {
+    if (file_type != FILE_TYPE_DIRECTORY) {
+      if (unlink(path.c_str()) != 0) {
+        err(2, "unlinking '%s'", path.c_str());
+        return false;
+      }
+      return true;
+    }
+
+    EnsureDirReadAndWritePerms(path);
+
+    struct dirent *entry;
+    DIR *dh = opendir(path.c_str());
+    if (!dh) {
+      err(2, "opendir '%s'", path.c_str());
+    }
+    errno = 0;
+    while ((entry = readdir(dh)) != nullptr) {
+      if (!strcmp(entry->d_name, ".") || !strcmp(entry->d_name, "..")) continue;
+      const std::string entry_path = path + '/' + entry->d_name;
+      FileType entry_file_type = DentryToFileType(entry_path, entry);
+      DelTree(entry_path, entry_file_type);
+      errno = 0;
+    }
+    if (errno != 0) {
+      err(2, "readdir '%s'", path.c_str());
+    }
+    closedir(dh);
+    if (rmdir(path.c_str()) != 0) {
+      err(2, "rmdir '%s'", path.c_str());
+    }
+    return true;
+  }
+
+ private:
+  std::string output_base_;
+  std::string output_filename_;
+  std::string temp_filename_;
+
+  FileInfoMap manifest_;
+};
+
+int main(int argc, char **argv) {
+  argv0 = argv[0];
+
+  argc--; argv++;
+  bool allow_relative = false;
+  bool use_metadata = false;
+
+  while (argc >= 1) {
+    if (strcmp(argv[0], "--allow_relative") == 0) {
+      allow_relative = true;
+      argc--; argv++;
+    } else if (strcmp(argv[0], "--use_metadata") == 0) {
+      use_metadata = true;
+      argc--; argv++;
+    } else {
+      break;
+    }
+  }
+
+  if (argc != 2) {
+    fprintf(stderr, "usage: %s "
+            "[--allow_relative] [--use_metadata] "
+            "INPUT RUNFILES\n",
+            argv0);
+    return 1;
+  }
+
+  input_filename = argv[0];
+  output_base_dir = argv[1];
+
+  std::string manifest_file = input_filename;
+  if (input_filename[0] != '/') {
+    char cwd_buf[PATH_MAX];
+    if (getcwd(cwd_buf, sizeof(cwd_buf)) == nullptr) {
+      err(2, "getcwd failed");
+    }
+    manifest_file = std::string(cwd_buf) + '/' + manifest_file;
+  }
+
+  RunfilesCreator runfiles_creator(output_base_dir);
+  runfiles_creator.ReadManifest(manifest_file, allow_relative, use_metadata);
+  runfiles_creator.CreateRunfiles();
+
+  return 0;
+}
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 536a381..c2e36df 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -30,9 +30,6 @@
 echo "ro.build.host=$BUILD_HOSTNAME"
 echo "ro.build.tags=$BUILD_VERSION_TAGS"
 echo "ro.build.flavor=$TARGET_BUILD_FLAVOR"
-if [ -n "$BOARD_BUILD_SYSTEM_ROOT_IMAGE" ] ; then
-  echo "ro.build.system_root_image=$BOARD_BUILD_SYSTEM_ROOT_IMAGE"
-fi
 
 # These values are deprecated, use "ro.product.cpu.abilist"
 # instead (see below).
diff --git a/tools/canoninja/go.mod b/tools/canoninja/go.mod
index c5a924e..9e668a5 100644
--- a/tools/canoninja/go.mod
+++ b/tools/canoninja/go.mod
@@ -1 +1,3 @@
 module canoninja
+
+go 1.19
diff --git a/tools/check_elf_file.py b/tools/check_elf_file.py
index 045cb1d..eaa1854 100755
--- a/tools/check_elf_file.py
+++ b/tools/check_elf_file.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2019 The Android Open Source Project
 #
@@ -72,9 +72,9 @@
 
 def _get_os_name():
   """Get the host OS name."""
-  if sys.platform == 'linux2':
+  if sys.platform.startswith('linux'):
     return 'linux'
-  if sys.platform == 'darwin':
+  if sys.platform.startswith('darwin'):
     return 'darwin'
   raise ValueError(sys.platform + ' is not supported')
 
@@ -196,11 +196,7 @@
   def _read_llvm_readobj(cls, elf_file_path, header, llvm_readobj):
     """Run llvm-readobj and parse the output."""
     cmd = [llvm_readobj, '--dynamic-table', '--dyn-symbols', elf_file_path]
-    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, _ = proc.communicate()
-    rc = proc.returncode
-    if rc != 0:
-      raise subprocess.CalledProcessError(rc, cmd, out)
+    out = subprocess.check_output(cmd, text=True)
     lines = out.splitlines()
     return cls._parse_llvm_readobj(elf_file_path, header, lines)
 
@@ -467,7 +463,7 @@
     """Check whether all undefined symbols are resolved to a definition."""
     all_elf_files = [self._file_under_test] + self._shared_libs
     missing_symbols = []
-    for sym, imported_vers in self._file_under_test.imported.iteritems():
+    for sym, imported_vers in self._file_under_test.imported.items():
       for imported_ver in imported_vers:
         lib = self._find_symbol_from_libs(all_elf_files, sym, imported_ver)
         if not lib:
diff --git a/tools/check_radio_versions.py b/tools/check_radio_versions.py
index ebe621f..d1d50e6 100755
--- a/tools/check_radio_versions.py
+++ b/tools/check_radio_versions.py
@@ -22,11 +22,18 @@
 except ImportError:
   from sha import sha as sha1
 
-if len(sys.argv) < 2:
+import argparse
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--board_info_txt", nargs="?", required=True)
+parser.add_argument("--board_info_check", nargs="*", required=True)
+args = parser.parse_args()
+
+if not args.board_info_txt:
   sys.exit(0)
 
 build_info = {}
-f = open(sys.argv[1])
+f = open(args.board_info_txt)
 for line in f:
   line = line.strip()
   if line.startswith("require"):
@@ -36,7 +43,7 @@
 
 bad = False
 
-for item in sys.argv[2:]:
+for item in args.board_info_check:
   key, fn = item.split(":", 1)
 
   values = build_info.get(key, None)
@@ -52,8 +59,8 @@
   try:
     f = open(fn + ".sha1")
   except IOError:
-    if not bad: print
-    print "*** Error opening \"%s.sha1\"; can't verify %s" % (fn, key)
+    if not bad: print()
+    print("*** Error opening \"%s.sha1\"; can't verify %s" % (fn, key))
     bad = True
     continue
   for line in f:
@@ -63,17 +70,17 @@
     versions[h] = v
 
   if digest not in versions:
-    if not bad: print
-    print "*** SHA-1 hash of \"%s\" doesn't appear in \"%s.sha1\"" % (fn, fn)
+    if not bad: print()
+    print("*** SHA-1 hash of \"%s\" doesn't appear in \"%s.sha1\"" % (fn, fn))
     bad = True
     continue
 
   if versions[digest] not in values:
-    if not bad: print
-    print "*** \"%s\" is version %s; not any %s allowed by \"%s\"." % (
-        fn, versions[digest], key, sys.argv[1])
+    if not bad: print()
+    print("*** \"%s\" is version %s; not any %s allowed by \"%s\"." % (
+        fn, versions[digest], key, args.board_info_txt))
     bad = True
 
 if bad:
-  print
+  print()
   sys.exit(1)
diff --git a/tools/checkowners.py b/tools/checkowners.py
index d6853d8..f037321 100755
--- a/tools/checkowners.py
+++ b/tools/checkowners.py
@@ -5,8 +5,8 @@
 import argparse
 import re
 import sys
-import urllib
-import urllib2
+import urllib.request, urllib.parse, urllib.error
 
 parser = argparse.ArgumentParser(description='Check OWNERS file syntax')
 parser.add_argument('-v', '--verbose', dest='verbose',
@@ -25,15 +25,15 @@
 
 def echo(msg):
   if args.verbose:
-    print msg
+    print(msg)
 
 
 def find_address(address):
   if address not in checked_addresses:
     request = (gerrit_server + '/accounts/?n=1&q=email:'
-               + urllib.quote(address))
+               + urllib.parse.quote(address))
     echo('Checking email address: ' + address)
-    result = urllib2.urlopen(request).read()
+    result = urllib.request.urlopen(request).read().decode()  # read() returns bytes in Python 3
     checked_addresses[address] = result.find('"_account_id":') >= 0
     if checked_addresses[address]:
       echo('Found email address: ' + address)
@@ -43,7 +43,7 @@
 def check_address(fname, num, address):
   if find_address(address):
     return 0
-  print '%s:%d: ERROR: unknown email address: %s' % (fname, num, address)
+  print('%s:%d: ERROR: unknown email address: %s' % (fname, num, address))
   return 1
 
 
@@ -72,7 +72,7 @@
       stripped_line = re.sub('#.*$', '', line).strip()
       if not patterns.match(stripped_line):
         error += 1
-        print '%s:%d: ERROR: unknown line [%s]' % (fname, num, line.strip())
+        print('%s:%d: ERROR: unknown line [%s]' % (fname, num, line.strip()))
       elif args.check_address:
         if perfile_pattern.match(stripped_line):
           for addr in perfile_pattern.match(stripped_line).group(1).split(','):
diff --git a/tools/compare_fileslist.py b/tools/compare_fileslist.py
deleted file mode 100755
index 1f507d8..0000000
--- a/tools/compare_fileslist.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#!/usr/bin/env python
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import cgi, os, string, sys
-
-def IsDifferent(row):
-  val = None
-  for v in row:
-    if v:
-      if not val:
-        val = v
-      else:
-        if val != v:
-          return True
-  return False
-
-def main(argv):
-  inputs = argv[1:]
-  data = {}
-  index = 0
-  for input in inputs:
-    f = file(input, "r")
-    lines = f.readlines()
-    f.close()
-    lines = map(string.split, lines)
-    lines = map(lambda (x,y): (y,int(x)), lines)
-    for fn,sz in lines:
-      if not data.has_key(fn):
-        data[fn] = {}
-      data[fn][index] = sz
-    index = index + 1
-  rows = []
-  for fn,sizes in data.iteritems():
-    row = [fn]
-    for i in range(0,index):
-      if sizes.has_key(i):
-        row.append(sizes[i])
-      else:
-        row.append(None)
-    rows.append(row)
-  rows = sorted(rows, key=lambda x: x[0])
-  print """<html>
-    <head>
-      <style type="text/css">
-        .fn, .sz, .z, .d {
-          padding-left: 10px;
-          padding-right: 10px;
-        }
-        .sz, .z, .d {
-          text-align: right;
-        }
-        .fn {
-          background-color: #ffffdd;
-        }
-        .sz {
-          background-color: #ffffcc;
-        }
-        .z {
-          background-color: #ffcccc;
-        }
-        .d {
-          background-color: #99ccff;
-        }
-      </style>
-    </head>
-    <body>
-  """
-  print "<table>"
-  print "<tr>"
-  for input in inputs:
-    combo = input.split(os.path.sep)[1]
-    print "  <td class='fn'>%s</td>" % cgi.escape(combo)
-  print "</tr>"
-
-  for row in rows:
-    print "<tr>"
-    for sz in row[1:]:
-      if not sz:
-        print "  <td class='z'>&nbsp;</td>"
-      elif IsDifferent(row[1:]):
-        print "  <td class='d'>%d</td>" % sz
-      else:
-        print "  <td class='sz'>%d</td>" % sz
-    print "  <td class='fn'>%s</td>" % cgi.escape(row[0])
-    print "</tr>"
-  print "</table>"
-  print "</body></html>"
-
-if __name__ == '__main__':
-  main(sys.argv)
-
-
diff --git a/tools/compliance/Android.bp b/tools/compliance/Android.bp
index ec0f2f9..8e13f2f 100644
--- a/tools/compliance/Android.bp
+++ b/tools/compliance/Android.bp
@@ -18,16 +18,33 @@
 }
 
 blueprint_go_binary {
-    name: "checkshare",
+    name: "compliance_checkmetadata",
+    srcs: ["cmd/checkmetadata/checkmetadata.go"],
+    deps: [
+        "compliance-module",
+        "projectmetadata-module",
+        "soong-response",
+    ],
+    testSrcs: ["cmd/checkmetadata/checkmetadata_test.go"],
+}
+
+blueprint_go_binary {
+    name: "compliance_checkshare",
     srcs: ["cmd/checkshare/checkshare.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/checkshare/checkshare_test.go"],
 }
 
 blueprint_go_binary {
     name: "compliancenotice_bom",
     srcs: ["cmd/bom/bom.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/bom/bom_test.go"],
 }
 
@@ -42,23 +59,32 @@
 }
 
 blueprint_go_binary {
-    name: "listshare",
+    name: "compliance_listshare",
     srcs: ["cmd/listshare/listshare.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/listshare/listshare_test.go"],
 }
 
 blueprint_go_binary {
-    name: "dumpgraph",
+    name: "compliance_dumpgraph",
     srcs: ["cmd/dumpgraph/dumpgraph.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/dumpgraph/dumpgraph_test.go"],
 }
 
 blueprint_go_binary {
-    name: "dumpresolutions",
+    name: "compliance_dumpresolutions",
     srcs: ["cmd/dumpresolutions/dumpresolutions.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/dumpresolutions/dumpresolutions_test.go"],
 }
 
@@ -68,14 +94,18 @@
     deps: [
         "compliance-module",
         "blueprint-deptools",
+        "soong-response",
     ],
     testSrcs: ["cmd/htmlnotice/htmlnotice_test.go"],
 }
 
 blueprint_go_binary {
-    name: "rtrace",
+    name: "compliance_rtrace",
     srcs: ["cmd/rtrace/rtrace.go"],
-    deps: ["compliance-module"],
+    deps: [
+        "compliance-module",
+        "soong-response",
+    ],
     testSrcs: ["cmd/rtrace/rtrace_test.go"],
 }
 
@@ -85,6 +115,7 @@
     deps: [
         "compliance-module",
         "blueprint-deptools",
+        "soong-response",
     ],
     testSrcs: ["cmd/textnotice/textnotice_test.go"],
 }
@@ -95,10 +126,22 @@
     deps: [
         "compliance-module",
         "blueprint-deptools",
+        "soong-response",
     ],
     testSrcs: ["cmd/xmlnotice/xmlnotice_test.go"],
 }
 
+blueprint_go_binary {
+    name: "compliance_sbom",
+    srcs: ["cmd/sbom/sbom.go"],
+    deps: [
+        "compliance-module",
+        "blueprint-deptools",
+        "soong-response",
+    ],
+    testSrcs: ["cmd/sbom/sbom_test.go"],
+}
+
 bootstrap_go_package {
     name: "compliance-module",
     srcs: [
@@ -135,6 +178,8 @@
         "test_util.go",
     ],
     deps: [
+        "compliance-test-fs-module",
+        "projectmetadata-module",
         "golang-protobuf-proto",
         "golang-protobuf-encoding-prototext",
         "license_metadata_proto",
diff --git a/tools/compliance/README.md b/tools/compliance/README.md
new file mode 100644
index 0000000..995d9ca
--- /dev/null
+++ b/tools/compliance/README.md
@@ -0,0 +1,101 @@
+# Compliance
+
+<!-- Much of this content also appears in doc.go.
+When changing this file, consider whether the change also applies to doc.go. -->
+
+Package compliance provides an approved means for reading, consuming, and
+analyzing license metadata graphs.
+
+Assuming the license metadata and dependencies are fully and accurately
+recorded in the build system, any discrepancy between the official policy for
+open source license compliance and this code is **a bug in this code.**
+
+## Naming
+
+All of the code that directly reflects a policy decision belongs in a file with
+a name beginning `policy_`. Changes to these files need to be authored or
+reviewed by someone in OSPO or whichever successor group governs policy.
+
+The files with names not beginning `policy_` describe data types and general,
+reusable algorithms.
+
+The source code for binary tools and utilities appears under the `cmd/`
+subdirectory. Other subdirectories contain reusable components that are not
+`compliance` per se.
+
+## Data Types
+
+A few principal types to understand are LicenseGraph, LicenseCondition, and
+ResolutionSet.
+
+### LicenseGraph
+
+A LicenseGraph is an immutable graph of the targets and dependencies reachable
+from a specific set of root targets. In general, the root targets will be the
+artifacts in a release or distribution. While conceptually immutable, parts of
+the graph may be loaded or evaluated lazily.
+
+Conceptually, the graph itself will always be a directed acyclic graph. One
+representation is a set of directed edges. Another is a set of nodes with
+directed edges to their dependencies.
+
+The edges have annotations, which can distinguish between build tools, runtime
+dependencies, and dependencies like 'contains' that make a derivative work.
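+
+A purely illustrative sketch of the edge-set representation described above;
+the names here are hypothetical, and the package's real graph types are
+richer:
+
+```go
+// edge is a hypothetical illustration of one directed edge in the graph:
+// a target, one of its dependencies, and the annotations on the edge.
+type edge struct {
+	target, dependency string              // node names
+	annotations        map[string]struct{} // e.g. "toolchain", "dynamic", "contains"
+}
+```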
+
+### LicenseCondition
+
+A LicenseCondition is an immutable tuple pairing a condition name with an
+originating target. e.g. Per current policy, a static library licensed under an
+MIT license would pair a "notice" condition with the static library target, and
+a dynamic library licensed under GPL would pair a "restricted" condition with
+the dynamic library target.
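+
+A minimal sketch, mirroring what the `dumpresolutions` command does, of
+turning user-supplied condition names into `LicenseCondition` values (the
+wrapper function itself is hypothetical):
+
+```go
+package example
+
+import "android/soong/tools/compliance"
+
+// conditionsByName maps recognized condition names (e.g. "notice",
+// "restricted") to LicenseCondition values. Unrecognized names yield the
+// zero value, so callers may want to validate names first.
+func conditionsByName(names []string) []compliance.LicenseCondition {
+	lcs := make([]compliance.LicenseCondition, 0, len(names))
+	for _, name := range names {
+		lcs = append(lcs, compliance.RecognizedConditionNames[name])
+	}
+	return lcs
+}
+```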
+
+### ResolutionSet
+
+A ResolutionSet is an immutable set of `AttachesTo`, `ActsOn`, `Resolves`
+tuples describing how license conditions apply to targets.
+
+`AttachesTo` is the trigger for acting. Distribution of the target invokes
+the policy.
+
+`ActsOn` is the target to share, give notice for, hide etc.
+
+`Resolves` is the set of conditions that the action resolves.
+
+For most condition types, `ActsOn` will be the target where the condition
+originated. For example, a notice condition policy means attribution or notice
+must be given for the target where the condition originates. Likewise, a
+proprietary condition policy means the privacy of the target where the
+condition originates must be respected. i.e. The thing acted on is the origin.
+
+Restricted conditions are different. The infectious nature of restricted often
+means sharing code that is not the target where the restricted condition
+originates. Linking an MIT library to a GPL library implies a policy to share
+the MIT library despite the MIT license having no source sharing requirement.
+
+In this case, one or more resolution tuples will have the MIT-licensed module
+in `ActsOn` and the restricted condition originating at the GPL library module
+in `Resolves`. These tuples attach to (`AttachesTo`) every target that depends
+on the GPL library because shipping any of those targets triggers the policy
+to share the code.
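+
+A minimal sketch of consuming a ResolutionSet, modeled on the `listshare`
+command elsewhere in this change (the counting function itself is
+hypothetical):
+
+```go
+package example
+
+import "android/soong/tools/compliance"
+
+// countActionable walks a ResolutionSet: for every target the set attaches
+// to, count the resolutions describing what to act on and which conditions
+// the action resolves.
+func countActionable(shareSource *compliance.ResolutionSet) int {
+	n := 0
+	for _, target := range shareSource.AttachesTo() {
+		// Pure aggregates that carry no shared conditions of their own only
+		// pass conditions through; skip them as listshare does.
+		if shareSource.IsPureAggregate(target) && !target.LicenseConditions().MatchesAnySet(compliance.ImpliesShared) {
+			continue
+		}
+		n += len(shareSource.Resolutions(target))
+	}
+	return n
+}
+```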
+
+## Processes
+
+### ReadLicenseGraph
+
+The principal means to ingest license metadata. Given the distribution targets,
+ReadLicenseGraph populates the LicenseGraph for those root targets.
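+
+A minimal sketch of the call, as the commands under `cmd/` use it
+(`loadGraph` is a hypothetical wrapper, and the pointer return type is
+assumed; `compliance.FS` is the default filesystem the commands pass in):
+
+```go
+package example
+
+import (
+	"fmt"
+	"os"
+
+	"android/soong/tools/compliance"
+)
+
+// loadGraph reads the license graph rooted at the given .meta_lic files,
+// reporting warnings to stderr.
+func loadGraph(files []string) (*compliance.LicenseGraph, error) {
+	licenseGraph, err := compliance.ReadLicenseGraph(compliance.FS, os.Stderr, files)
+	if err != nil {
+		return nil, fmt.Errorf("unable to read license metadata file(s) %q: %w", files, err)
+	}
+	return licenseGraph, nil
+}
+```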
+
+### NoticeIndex.IndexLicenseTexts
+
+IndexLicenseTexts reads, deduplicates, and caches license texts for notice
+files. It also reads and caches project metadata for deriving library names.
+
+The algorithm for deriving library names has not been dictated by OSPO policy,
+but reflects a pragmatic attempt to comply with Android policy regarding
+unreleased product names, proprietary partner names, etc.
+
+### projectmetadata.Index.MetadataForProjects
+
+MetadataForProjects reads, deduplicates, and caches project METADATA files
+used for notice library names and various properties appearing in SBOMs.
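+
+A minimal sketch, following `cmd/checkmetadata` in this change
+(`metadataCount` is a hypothetical wrapper):
+
+```go
+package example
+
+import (
+	"fmt"
+
+	"android/soong/tools/compliance"
+	"android/soong/tools/compliance/projectmetadata"
+)
+
+// metadataCount indexes project METADATA files under the default filesystem
+// and returns how many metadata files were found for the given projects.
+func metadataCount(projects ...string) (int, error) {
+	ix := projectmetadata.NewIndex(compliance.FS)
+	pms, err := ix.MetadataForProjects(projects...)
+	if err != nil {
+		return 0, fmt.Errorf("unable to read project metadata: %w", err)
+	}
+	return len(pms), nil
+}
+```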
diff --git a/tools/compliance/cmd/bom/bom.go b/tools/compliance/cmd/bom/bom.go
index b613a1f..187f828 100644
--- a/tools/compliance/cmd/bom/bom.go
+++ b/tools/compliance/cmd/bom/bom.go
@@ -24,13 +24,11 @@
 	"path/filepath"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
 var (
-	outputFile  = flag.String("o", "-", "Where to write the bill of materials. (default stdout)")
-	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -55,22 +53,10 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs a bill of materials. i.e. the list of installed paths.
-
-Options:
-`, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
-	}
-}
-
 // newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
 	var f multiString
-	flag.Var(&f, name, usage)
+	flags.Var(&f, name, usage)
 	return &f
 }
 
@@ -81,16 +67,52 @@
 func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
 
 func main() {
-	flag.Parse()
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs a bill of materials. i.e. the list of installed paths.
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the bill of materials. (default stdout)")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
 	if len(*outputFile) == 0 {
-		flag.Usage()
+		flags.Usage()
 		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
 		os.Exit(2)
 	} else {
@@ -118,10 +140,10 @@
 
 	ctx := &context{ofile, os.Stderr, compliance.FS, *stripPrefix}
 
-	err := billOfMaterials(ctx, flag.Args()...)
+	err := billOfMaterials(ctx, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
diff --git a/tools/compliance/cmd/checkmetadata/checkmetadata.go b/tools/compliance/cmd/checkmetadata/checkmetadata.go
new file mode 100644
index 0000000..c6c84e4
--- /dev/null
+++ b/tools/compliance/cmd/checkmetadata/checkmetadata.go
@@ -0,0 +1,148 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"flag"
+	"fmt"
+	"io"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"strings"
+
+	"android/soong/response"
+	"android/soong/tools/compliance"
+	"android/soong/tools/compliance/projectmetadata"
+)
+
+var (
+	failNoneRequested = fmt.Errorf("\nNo projects requested")
+)
+
+func main() {
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {-o outfile} projectdir {projectdir...}
+
+Tries to open the METADATA.android or METADATA file in each projectdir,
+reporting any errors on stderr.
+
+Reports "FAIL" to stdout and exits with status 1 if any errors are found.
+
+Otherwise, reports "PASS" and the number of project metadata files found,
+exiting with status 0.
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+
+	flags.Parse(expandedArgs)
+
+	// Must specify at least one root target.
+	if flags.NArg() == 0 {
+		flags.Usage()
+		os.Exit(2)
+	}
+
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
+	err := checkProjectMetadata(ofile, os.Stderr, compliance.FS, flags.Args()...)
+	if err != nil {
+		if err == failNoneRequested {
+			flags.Usage()
+		}
+		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+		fmt.Fprintln(ofile, "FAIL")
+		os.Exit(1)
+	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+			os.Exit(1)
+		}
+	}
+	os.Exit(0)
+}
+
+// checkProjectMetadata implements the checkmetadata utility.
+func checkProjectMetadata(stdout, stderr io.Writer, rootFS fs.FS, projects ...string) error {
+
+	if len(projects) < 1 {
+		return failNoneRequested
+	}
+
+	// Read the project metadata files from `projects`
+	ix := projectmetadata.NewIndex(rootFS)
+	pms, err := ix.MetadataForProjects(projects...)
+	if err != nil {
+		return fmt.Errorf("Unable to read project metadata file(s) %q from %q: %w\n", projects, os.Getenv("PWD"), err)
+	}
+
+	fmt.Fprintf(stdout, "PASS -- parsed %d project metadata files for %d projects\n", len(pms), len(projects))
+	return nil
+}
diff --git a/tools/compliance/cmd/checkmetadata/checkmetadata_test.go b/tools/compliance/cmd/checkmetadata/checkmetadata_test.go
new file mode 100644
index 0000000..cf2090b
--- /dev/null
+++ b/tools/compliance/cmd/checkmetadata/checkmetadata_test.go
@@ -0,0 +1,191 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"fmt"
+	"os"
+	"strings"
+	"testing"
+
+	"android/soong/tools/compliance"
+)
+
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to testdata directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
+func Test(t *testing.T) {
+	tests := []struct {
+		name           string
+		projects       []string
+		expectedStdout string
+	}{
+		{
+			name:           "1p",
+			projects:       []string{"firstparty"},
+			expectedStdout: "PASS -- parsed 1 project metadata files for 1 projects",
+		},
+		{
+			name:           "notice",
+			projects:       []string{"notice"},
+			expectedStdout: "PASS -- parsed 1 project metadata files for 1 projects",
+		},
+		{
+			name:           "1p+notice",
+			projects:       []string{"firstparty", "notice"},
+			expectedStdout: "PASS -- parsed 2 project metadata files for 2 projects",
+		},
+		{
+			name:           "reciprocal",
+			projects:       []string{"reciprocal"},
+			expectedStdout: "PASS -- parsed 1 project metadata files for 1 projects",
+		},
+		{
+			name:           "1p+notice+reciprocal",
+			projects:       []string{"firstparty", "notice", "reciprocal"},
+			expectedStdout: "PASS -- parsed 3 project metadata files for 3 projects",
+		},
+		{
+			name:           "restricted",
+			projects:       []string{"restricted"},
+			expectedStdout: "PASS -- parsed 1 project metadata files for 1 projects",
+		},
+		{
+			name:           "1p+notice+reciprocal+restricted",
+			projects:       []string{
+				"firstparty",
+				"notice",
+				"reciprocal",
+				"restricted",
+			},
+			expectedStdout: "PASS -- parsed 4 project metadata files for 4 projects",
+		},
+		{
+			name:           "proprietary",
+			projects:       []string{"proprietary"},
+			expectedStdout: "PASS -- parsed 1 project metadata files for 1 projects",
+		},
+		{
+			name:           "1p+notice+reciprocal+restricted+proprietary",
+			projects:       []string{
+				"firstparty",
+				"notice",
+				"reciprocal",
+				"restricted",
+				"proprietary",
+			},
+			expectedStdout: "PASS -- parsed 5 project metadata files for 5 projects",
+		},
+		{
+			name:           "missing1",
+			projects:       []string{"regressgpl1"},
+			expectedStdout: "PASS -- parsed 0 project metadata files for 1 projects",
+		},
+		{
+			name:           "1p+notice+reciprocal+restricted+proprietary+missing1",
+			projects:       []string{
+				"firstparty",
+				"notice",
+				"reciprocal",
+				"restricted",
+				"proprietary",
+				"regressgpl1",
+			},
+			expectedStdout: "PASS -- parsed 5 project metadata files for 6 projects",
+		},
+		{
+			name:           "missing2",
+			projects:       []string{"regressgpl2"},
+			expectedStdout: "PASS -- parsed 0 project metadata files for 1 projects",
+		},
+		{
+			name:           "1p+notice+reciprocal+restricted+proprietary+missing1+missing2",
+			projects:       []string{
+				"firstparty",
+				"notice",
+				"reciprocal",
+				"restricted",
+				"proprietary",
+				"regressgpl1",
+				"regressgpl2",
+			},
+			expectedStdout: "PASS -- parsed 5 project metadata files for 7 projects",
+		},
+		{
+			name:           "missing2+1p+notice+reciprocal+restricted+proprietary+missing1",
+			projects:       []string{
+				"regressgpl2",
+				"firstparty",
+				"notice",
+				"reciprocal",
+				"restricted",
+				"proprietary",
+				"regressgpl1",
+			},
+			expectedStdout: "PASS -- parsed 5 project metadata files for 7 projects",
+		},
+		{
+			name:           "missing2+1p+notice+missing1+reciprocal+restricted+proprietary",
+			projects:       []string{
+				"regressgpl2",
+				"firstparty",
+				"notice",
+				"regressgpl1",
+				"reciprocal",
+				"restricted",
+				"proprietary",
+			},
+			expectedStdout: "PASS -- parsed 5 project metadata files for 7 projects",
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			stdout := &bytes.Buffer{}
+			stderr := &bytes.Buffer{}
+
+			projects := make([]string, 0, len(tt.projects))
+			for _, project := range tt.projects {
+				projects = append(projects, "testdata/"+project)
+			}
+			err := checkProjectMetadata(stdout, stderr, compliance.GetFS(""), projects...)
+			if err != nil {
+				t.Fatalf("checkmetadata: error = %v, stderr = %v", err, stderr)
+				return
+			}
+			var actualStdout string
+			for _, s := range strings.Split(stdout.String(), "\n") {
+				ts := strings.TrimLeft(s, " \t")
+				if len(ts) < 1 {
+					continue
+				}
+				if len(actualStdout) > 0 {
+					t.Errorf("checkmetadata: unexpected multiple output lines %q, want %q", actualStdout+"\n"+ts, tt.expectedStdout)
+				}
+				actualStdout = ts
+			}
+			if actualStdout != tt.expectedStdout {
+				t.Errorf("checkmetadata: unexpected stdout %q, want %q", actualStdout, tt.expectedStdout)
+			}
+		})
+	}
+}
diff --git a/tools/compliance/cmd/checkshare/checkshare.go b/tools/compliance/cmd/checkshare/checkshare.go
index 73bdcb5..f7b4cd2 100644
--- a/tools/compliance/cmd/checkshare/checkshare.go
+++ b/tools/compliance/cmd/checkshare/checkshare.go
@@ -15,6 +15,7 @@
 package main
 
 import (
+	"bytes"
 	"flag"
 	"fmt"
 	"io"
@@ -22,31 +23,12 @@
 	"os"
 	"path/filepath"
 	"sort"
+	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s file.meta_lic {file.meta_lic...}
-
-Reports on stderr any targets where policy says that the source both
-must and must not be shared. The error report indicates the target, the
-license condition that has a source privacy policy, and the license
-condition that has a source sharing policy.
-
-Any given target may appear multiple times with different combinations
-of conflicting license conditions.
-
-If all the source code that policy says must be shared may be shared,
-outputs "PASS" to stdout and exits with status 0.
-
-If policy says any source must both be shared and not be shared,
-outputs "FAIL" to stdout and exits with status 1.
-`, filepath.Base(os.Args[0]))
-	}
-}
-
 var (
 	failConflicts     = fmt.Errorf("conflicts")
 	failNoneRequested = fmt.Errorf("\nNo metadata files requested")
@@ -61,24 +43,105 @@
 func (l byError) Less(i, j int) bool { return l[i].Error() < l[j].Error() }
 
 func main() {
-	flag.Parse()
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {-o outfile} file.meta_lic {file.meta_lic...}
+
+Reports on stderr any targets where policy says that the source both
+must and must not be shared. The error report indicates the target, the
+license condition that has a source privacy policy, and the license
+condition that has a source sharing policy.
+
+Any given target may appear multiple times with different combinations
+of conflicting license conditions.
+
+If all the source code that policy says must be shared may be shared,
+outputs "PASS" to stdout and exits with status 0.
+
+If policy says any source must both be shared and not be shared,
+outputs "FAIL" to stdout and exits with status 1.
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
-	err := checkShare(os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
+	err := checkShare(ofile, os.Stderr, compliance.FS, flags.Args()...)
 	if err != nil {
 		if err != failConflicts {
 			if err == failNoneRequested {
-				flag.Usage()
+				flags.Usage()
 			}
 			fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		}
 		os.Exit(1)
 	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+			os.Exit(1)
+		}
+	}
 	os.Exit(0)
 }
 
@@ -92,7 +155,7 @@
 	// Read the license graph from the license metadata files (*.meta_lic).
 	licenseGraph, err := compliance.ReadLicenseGraph(rootFS, stderr, files)
 	if err != nil {
-		return fmt.Errorf("Unable to read license metadata file(s) %q: %w\n", files, err)
+		return fmt.Errorf("Unable to read license metadata file(s) %q from %q: %w\n", files, os.Getenv("PWD"), err)
 	}
 	if licenseGraph == nil {
 		return failNoLicenses
diff --git a/tools/compliance/cmd/dumpgraph/dumpgraph.go b/tools/compliance/cmd/dumpgraph/dumpgraph.go
index 32a3fc4..5625779 100644
--- a/tools/compliance/cmd/dumpgraph/dumpgraph.go
+++ b/tools/compliance/cmd/dumpgraph/dumpgraph.go
@@ -15,6 +15,7 @@
 package main
 
 import (
+	"bytes"
 	"flag"
 	"fmt"
 	"io"
@@ -24,14 +25,11 @@
 	"sort"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
 var (
-	graphViz        = flag.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
-	labelConditions = flag.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
-	stripPrefix     = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -55,8 +53,44 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+	var f multiString
+	flags.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
 		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
 
 Outputs space-separated Target Dependency Annotations tuples for each
@@ -70,42 +104,68 @@
 
 Options:
 `, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
+		flags.PrintDefaults()
 	}
-}
 
-// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
-	var f multiString
-	flag.Var(&f, name, usage)
-	return &f
-}
+	graphViz := flags.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
+	labelConditions := flags.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
+	outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
 
-// multiString implements the flag `Value` interface for multiple strings.
-type multiString []string
-
-func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
-func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
-
-func main() {
-	flag.Parse()
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
 	ctx := &context{*graphViz, *labelConditions, *stripPrefix}
 
-	err := dumpGraph(ctx, os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+	err := dumpGraph(ctx, ofile, os.Stderr, compliance.FS, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
 	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+			os.Exit(1)
+		}
+	}
 	os.Exit(0)
 }
 
diff --git a/tools/compliance/cmd/dumpresolutions/dumpresolutions.go b/tools/compliance/cmd/dumpresolutions/dumpresolutions.go
index d02c238..dc0cf88 100644
--- a/tools/compliance/cmd/dumpresolutions/dumpresolutions.go
+++ b/tools/compliance/cmd/dumpresolutions/dumpresolutions.go
@@ -15,6 +15,7 @@
 package main
 
 import (
+	"bytes"
 	"flag"
 	"fmt"
 	"io"
@@ -24,15 +25,11 @@
 	"sort"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
 var (
-	conditions      = newMultiString("c", "License condition to resolve. (may be given multiple times)")
-	graphViz        = flag.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
-	labelConditions = flag.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
-	stripPrefix     = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -57,8 +54,44 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+	var f multiString
+	flags.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
 		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
 
 Outputs a space-separated Target ActsOn Origin Condition tuple for each
@@ -75,32 +108,52 @@
 
 Options:
 `, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
+		flags.PrintDefaults()
 	}
-}
 
-// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
-	var f multiString
-	flag.Var(&f, name, usage)
-	return &f
-}
+	conditions := newMultiString(flags, "c", "License condition to resolve. (may be given multiple times)")
+	graphViz := flags.Bool("dot", false, "Whether to output graphviz (i.e. dot) format.")
+	labelConditions := flags.Bool("label_conditions", false, "Whether to label target nodes with conditions.")
+	outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
 
-// multiString implements the flag `Value` interface for multiple strings.
-type multiString []string
-
-func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
-func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
-
-func main() {
-	flag.Parse()
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
 	lcs := make([]compliance.LicenseCondition, 0, len(*conditions))
 	for _, name := range *conditions {
 		lcs = append(lcs, compliance.RecognizedConditionNames[name])
@@ -111,14 +164,21 @@
 		labelConditions: *labelConditions,
 		stripPrefix:     *stripPrefix,
 	}
-	_, err := dumpResolutions(ctx, os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+	_, err := dumpResolutions(ctx, ofile, os.Stderr, compliance.FS, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
 	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+			os.Exit(1)
+		}
+	}
 	os.Exit(0)
 }
 
diff --git a/tools/compliance/cmd/htmlnotice/htmlnotice.go b/tools/compliance/cmd/htmlnotice/htmlnotice.go
index e98b272..78371ee 100644
--- a/tools/compliance/cmd/htmlnotice/htmlnotice.go
+++ b/tools/compliance/cmd/htmlnotice/htmlnotice.go
@@ -24,21 +24,16 @@
 	"io/fs"
 	"os"
 	"path/filepath"
+	"sort"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 
 	"github.com/google/blueprint/deptools"
 )
 
 var (
-	outputFile  = flag.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
-	depsFile    = flag.String("d", "", "Where to write the deps file")
-	includeTOC  = flag.Bool("toc", true, "Whether to include a table of contents.")
-	product     = flag.String("product", "", "The name of the product for which the notice is generated.")
-	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-	title       = flag.String("title", "", "The title of the notice file.")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -70,23 +65,10 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs an html NOTICE.html or gzipped NOTICE.html.gz file if the -o filename
-ends with ".gz".
-
-Options:
-`, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
-	}
-}
-
 // newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
 	var f multiString
-	flag.Var(&f, name, usage)
+	flags.Var(&f, name, usage)
 	return &f
 }
 
@@ -97,16 +79,57 @@
 func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
 
 func main() {
-	flag.Parse()
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs an html NOTICE.html or gzipped NOTICE.html.gz file if the -o filename
+ends with ".gz".
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
+	depsFile := flags.String("d", "", "Where to write the deps file")
+	includeTOC := flags.Bool("toc", true, "Whether to include a table of contents.")
+	product := flags.String("product", "", "The name of the product for which the notice is generated.")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+	title := flags.String("title", "", "The title of the notice file.")
+
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
 	if len(*outputFile) == 0 {
-		flag.Usage()
+		flags.Usage()
 		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
 		os.Exit(2)
 	} else {
@@ -143,10 +166,10 @@
 
 	ctx := &context{ofile, os.Stderr, compliance.FS, *includeTOC, *product, *stripPrefix, *title, &deps}
 
-	err := htmlNotice(ctx, flag.Args()...)
+	err := htmlNotice(ctx, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
@@ -253,7 +276,8 @@
 	}
 	fmt.Fprintln(ctx.stdout, "</body></html>")
 
-	*ctx.deps = ni.InputNoticeFiles()
+	*ctx.deps = ni.InputFiles()
+	sort.Strings(*ctx.deps)
 
 	return nil
 }
diff --git a/tools/compliance/cmd/htmlnotice/htmlnotice_test.go b/tools/compliance/cmd/htmlnotice/htmlnotice_test.go
index b927018..8dc1197 100644
--- a/tools/compliance/cmd/htmlnotice/htmlnotice_test.go
+++ b/tools/compliance/cmd/htmlnotice/htmlnotice_test.go
@@ -78,7 +78,16 @@
 				usedBy{"highest.apex/lib/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition:  "firstparty",
@@ -106,7 +115,16 @@
 				usedBy{"highest.apex/lib/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -124,7 +142,16 @@
 				usedBy{"highest.apex/lib/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition:  "firstparty",
@@ -154,7 +181,16 @@
 				usedBy{"highest.apex/lib/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -170,7 +206,16 @@
 				usedBy{"container.zip/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/container.zip.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -182,7 +227,13 @@
 				usedBy{"application"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/application.meta_lic",
+				"testdata/firstparty/bin/bin3.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -194,7 +245,12 @@
 				usedBy{"bin/bin1"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -206,7 +262,10 @@
 				usedBy{"lib/libd.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "notice",
@@ -231,6 +290,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/highest.apex.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -256,6 +322,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/container.zip.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -275,6 +348,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/application.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -296,6 +373,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -308,7 +388,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "reciprocal",
@@ -333,6 +416,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/highest.apex.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -358,6 +448,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/container.zip.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -377,6 +474,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/application.meta_lic",
+				"testdata/reciprocal/bin/bin3.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -398,6 +499,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -410,7 +514,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "restricted",
@@ -440,6 +547,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/highest.apex.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -470,6 +584,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/container.zip.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -489,6 +610,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/application.meta_lic",
+				"testdata/restricted/bin/bin3.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -513,6 +638,9 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -525,7 +653,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/restricted/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "proprietary",
@@ -555,6 +686,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/highest.apex.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -586,6 +724,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/container.zip.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -606,6 +751,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/application.meta_lic",
+				"testdata/proprietary/bin/bin3.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -627,6 +776,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -639,7 +791,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/proprietary/lib/libd.so.meta_lic",
+			},
 		},
 	}
 	for _, tt := range tests {
diff --git a/tools/compliance/cmd/listshare/listshare.go b/tools/compliance/cmd/listshare/listshare.go
index 7f4038b..4ca6457 100644
--- a/tools/compliance/cmd/listshare/listshare.go
+++ b/tools/compliance/cmd/listshare/listshare.go
@@ -15,6 +15,7 @@
 package main
 
 import (
+	"bytes"
 	"flag"
 	"fmt"
 	"io"
@@ -24,12 +25,41 @@
 	"sort"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s file.meta_lic {file.meta_lic...}
+var (
+	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+	failNoLicenses    = fmt.Errorf("No licenses found")
+)
+
+func main() {
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {-o outfile} file.meta_lic {file.meta_lic...}
 
 Outputs a csv file with 1 project per line in the first field followed
 by target:condition pairs describing why the project must be shared.
@@ -39,30 +69,61 @@
 restricted (e.g. GPL) or reciprocal (e.g. MPL).
 `, filepath.Base(os.Args[0]))
 	}
-}
 
-var (
-	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
-	failNoLicenses    = fmt.Errorf("No licenses found")
-)
+	outputFile := flags.String("o", "-", "Where to write the list of projects to share. (default stdout)")
 
-func main() {
-	flag.Parse()
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
-	err := listShare(os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
+	err := listShare(ofile, os.Stderr, compliance.FS, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
 	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+			os.Exit(1)
+		}
+	}
 	os.Exit(0)
 }
 
@@ -76,7 +137,7 @@
 	// Read the license graph from the license metadata files (*.meta_lic).
 	licenseGraph, err := compliance.ReadLicenseGraph(rootFS, stderr, files)
 	if err != nil {
-		return fmt.Errorf("Unable to read license metadata file(s) %q: %v\n", files, err)
+		return fmt.Errorf("Unable to read license metadata file(s) %q from %q: %v\n", files, os.Getenv("PWD"), err)
 	}
 	if licenseGraph == nil {
 		return failNoLicenses
@@ -88,6 +149,9 @@
 	// Group the resolutions by project.
 	presolution := make(map[string]compliance.LicenseConditionSet)
 	for _, target := range shareSource.AttachesTo() {
+		if shareSource.IsPureAggregate(target) && !target.LicenseConditions().MatchesAnySet(compliance.ImpliesShared) {
+			continue
+		}
 		rl := shareSource.Resolutions(target)
 		sort.Sort(rl)
 		for _, r := range rl {
diff --git a/tools/compliance/cmd/listshare/listshare_test.go b/tools/compliance/cmd/listshare/listshare_test.go
index c1e38be..fb61583 100644
--- a/tools/compliance/cmd/listshare/listshare_test.go
+++ b/tools/compliance/cmd/listshare/listshare_test.go
@@ -194,13 +194,6 @@
 					conditions: []string{"restricted"},
 				},
 				{
-					project: "highest/apex",
-					conditions: []string{
-						"restricted",
-						"restricted_allows_dynamic_linking",
-					},
-				},
-				{
 					project: "static/binary",
 					conditions: []string{
 						"restricted_allows_dynamic_linking",
@@ -225,13 +218,6 @@
 					conditions: []string{"restricted"},
 				},
 				{
-					project: "container/zip",
-					conditions: []string{
-						"restricted",
-						"restricted_allows_dynamic_linking",
-					},
-				},
-				{
 					project:    "device/library",
 					conditions: []string{"restricted_allows_dynamic_linking"},
 				},
@@ -320,10 +306,6 @@
 					project:    "dynamic/binary",
 					conditions: []string{"restricted"},
 				},
-				{
-					project:    "highest/apex",
-					conditions: []string{"restricted"},
-				},
 			},
 		},
 		{
@@ -336,10 +318,6 @@
 					conditions: []string{"restricted"},
 				},
 				{
-					project:    "container/zip",
-					conditions: []string{"restricted"},
-				},
-				{
 					project:    "dynamic/binary",
 					conditions: []string{"restricted"},
 				},
@@ -381,10 +359,6 @@
 					project:    "bin/threelibraries",
 					conditions: []string{"restricted"},
 				},
-				{
-					project:    "container/zip",
-					conditions: []string{"restricted"},
-				},
 			},
 		},
 		{
@@ -397,10 +371,6 @@
 					conditions: []string{"restricted"},
 				},
 				{
-					project:    "container/zip",
-					conditions: []string{"restricted"},
-				},
-				{
 					project:    "lib/apache",
 					conditions: []string{"restricted"},
 				},
@@ -420,10 +390,6 @@
 					conditions: []string{"restricted"},
 				},
 				{
-					project:    "container/zip",
-					conditions: []string{"restricted"},
-				},
-				{
 					project:    "lib/apache",
 					conditions: []string{"restricted"},
 				},
@@ -447,10 +413,6 @@
 					conditions: []string{"restricted"},
 				},
 				{
-					project:    "container/zip",
-					conditions: []string{"restricted"},
-				},
-				{
 					project:    "lib/apache",
 					conditions: []string{"restricted"},
 				},
diff --git a/tools/compliance/cmd/rtrace/rtrace.go b/tools/compliance/cmd/rtrace/rtrace.go
index 91171c4..667cdce 100644
--- a/tools/compliance/cmd/rtrace/rtrace.go
+++ b/tools/compliance/cmd/rtrace/rtrace.go
@@ -15,6 +15,7 @@
 package main
 
 import (
+	"bytes"
 	"flag"
 	"fmt"
 	"io"
@@ -24,21 +25,19 @@
 	"sort"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 )
 
 var (
-	sources         = newMultiString("rtrace", "Projects or metadata files to trace back from. (required; multiple allowed)")
-	stripPrefix     = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoSources     = fmt.Errorf("\nNo projects or metadata files to trace back from")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
 
 type context struct {
-	sources         []string
-	stripPrefix     []string
+	sources     []string
+	stripPrefix []string
 }
 
 func (ctx context) strip(installPath string) string {
@@ -54,8 +53,44 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+	var f multiString
+	flags.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	var expandedArgs []string
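+	// Expand any @file response-file arguments into their contents before parsing flags.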
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
 		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
 
 Outputs a space-separated Target ActsOn Origin Condition tuple for each
@@ -72,50 +107,75 @@
 
 Options:
 `, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
+		flags.PrintDefaults()
 	}
-}
 
-// newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
-	var f multiString
-	flag.Var(&f, name, usage)
-	return &f
-}
+	outputFile := flags.String("o", "-", "Where to write the output. (default stdout)")
+	sources := newMultiString(flags, "rtrace", "Projects or metadata files to trace back from. (required; multiple allowed)")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
 
-// multiString implements the flag `Value` interface for multiple strings.
-type multiString []string
-
-func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
-func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
-
-func main() {
-	flag.Parse()
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
 	if len(*sources) == 0 {
-		flag.Usage()
+		flags.Usage()
 		fmt.Fprintf(os.Stderr, "\nMust specify at least 1 --rtrace source.\n")
 		os.Exit(2)
 	}
 
-	ctx := &context{
-		sources:         *sources,
-		stripPrefix:     *stripPrefix,
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
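+		// Verify up front that the output file's parent directory exists and is a directory.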
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
 	}
-	_, err := traceRestricted(ctx, os.Stdout, os.Stderr, compliance.FS, flag.Args()...)
+
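+	// Buffer the output in memory when writing to a file so a failed run leaves no partial file.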
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
+	ctx := &context{
+		sources:     *sources,
+		stripPrefix: *stripPrefix,
+	}
+	_, err := traceRestricted(ctx, ofile, os.Stderr, compliance.FS, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
 	}
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q from %q: %s\n", *outputFile, os.Getenv("PWD"), err)
+			os.Exit(1)
+		}
+	}
 	os.Exit(0)
 }
 
diff --git a/tools/compliance/cmd/sbom/sbom.go b/tools/compliance/cmd/sbom/sbom.go
new file mode 100644
index 0000000..afb377e
--- /dev/null
+++ b/tools/compliance/cmd/sbom/sbom.go
@@ -0,0 +1,399 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"flag"
+	"fmt"
+	"io"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+	"time"
+
+	"android/soong/response"
+	"android/soong/tools/compliance"
+	"android/soong/tools/compliance/projectmetadata"
+
+	"github.com/google/blueprint/deptools"
+)
+
+var (
+	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+	failNoLicenses    = fmt.Errorf("No licenses found")
+)
+
+type context struct {
+	stdout       io.Writer
+	stderr       io.Writer
+	rootFS       fs.FS
+	product      string
+	stripPrefix  []string
+	creationTime creationTimeGetter
+}
+
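+// strip removes the first matching stripPrefix from installPath, substituting
+// ctx.product when stripping would leave an empty path; if that is also empty,
+// the next prefix is tried.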
+func (ctx context) strip(installPath string) string {
+	for _, prefix := range ctx.stripPrefix {
+		if strings.HasPrefix(installPath, prefix) {
+			p := strings.TrimPrefix(installPath, prefix)
+			if len(p) == 0 {
+				p = ctx.product
+			}
+			if len(p) == 0 {
+				continue
+			}
+			return p
+		}
+	}
+	return installPath
+}
+
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+	var f multiString
+	flags.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	var expandedArgs []string
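+	// Expand any @file response-file arguments into their contents before parsing flags.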
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs an SBOM.spdx.
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the SBOM spdx file. (default stdout)")
+	depsFile := flags.String("d", "", "Where to write the deps file")
+	product := flags.String("product", "", "The name of the product for which the SBOM is generated.")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+
+	flags.Parse(expandedArgs)
+
+	// Must specify at least one root target.
+	if flags.NArg() == 0 {
+		flags.Usage()
+		os.Exit(2)
+	}
+
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
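+		// Verify up front that the output file's parent directory exists and is a directory.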
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
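+	// Buffer the output in memory when writing to a file so a failed run leaves no partial file.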
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
+	ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, actualTime}
+
+	deps, err := sbomGenerator(ctx, flags.Args()...)
+	if err != nil {
+		if err == failNoneRequested {
+			flags.Usage()
+		}
+		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+		os.Exit(1)
+	}
+
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+	}
+
+	if *depsFile != "" {
+		err := deptools.WriteDepFile(*depsFile, *outputFile, deps)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write deps to %q: %s\n", *depsFile, err)
+			os.Exit(1)
+		}
+	}
+	os.Exit(0)
+}
+
+type creationTimeGetter func() time.Time
+
+// actualTime returns the current time in UTC.
+func actualTime() time.Time {
+	return time.Now().UTC()
+}
+
+// replaceSlashes replaces each "/" with "-" so a path can be used in package and file SPDXIDs.
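+// e.g. "lib/liba.so.meta_lic" becomes "lib-liba.so.meta_lic".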
+func replaceSlashes(x string) string {
+	return strings.ReplaceAll(x, "/", "-")
+}
+
+// getPackageName returns the package name for a target node.
+func getPackageName(_ *context, tn *compliance.TargetNode) string {
+	return replaceSlashes(tn.Name())
+}
+
+// getDocumentName returns the SPDX document name for a target node.
+func getDocumentName(ctx *context, tn *compliance.TargetNode, pm *projectmetadata.ProjectMetadata) string {
+	if len(ctx.product) > 0 {
+		return replaceSlashes(ctx.product)
+	}
+	if len(tn.ModuleName()) > 0 {
+		if pm != nil {
+			return replaceSlashes(pm.Name() + ":" + tn.ModuleName())
+		}
+		return replaceSlashes(tn.ModuleName())
+	}
+
+	// TODO: Replace tn.Name() with pm.Name() + parts of the target name.
+	return replaceSlashes(tn.Name())
+}
+
+// getDownloadUrl returns the download URL if available (Git, SVN, etc.),
+// or NOASSERTION when no URL is available, none can be determined, or the result is ambiguous.
+func getDownloadUrl(_ *context, pm *projectmetadata.ProjectMetadata) string {
+	if pm == nil {
+		return "NOASSERTION"
+	}
+
+	urlsByTypeName := pm.UrlsByTypeName()
+	if urlsByTypeName == nil {
+		return "NOASSERTION"
+	}
+
+	url := urlsByTypeName.DownloadUrl()
+	if url == "" {
+		return "NOASSERTION"
+	}
+	return url
+}
+
+// getProjectMetadata returns the project metadata for the target node
+func getProjectMetadata(_ *context, pmix *projectmetadata.Index,
+	tn *compliance.TargetNode) (*projectmetadata.ProjectMetadata, error) {
+	pms, err := pmix.MetadataForProjects(tn.Projects()...)
+	if err != nil {
+		return nil, fmt.Errorf("Unable to read projects for %q: %w\n", tn, err)
+	}
+	if len(pms) == 0 {
+		return nil, nil
+	}
+
+	// TODO: skip the first element if it lacks any of the three needed fields.
+	return pms[0], nil
+}
+
+// sbomGenerator implements the SPDX SBOM utility.
+//
+// SBOMs are part of new US government regulation intended to improve national
+// cybersecurity and software supply-chain transparency; see https://www.cisa.gov/sbom.
+//
+// sbomGenerator follows the SPDX specification (https://spdx.github.io/spdx-spec/)
+// and the internal Google SBOM style guide (http://goto.google.com/spdx-style-guide).
+func sbomGenerator(ctx *context, files ...string) ([]string, error) {
+	// Must be at least one root file.
+	if len(files) < 1 {
+		return nil, failNoneRequested
+	}
+
+	pmix := projectmetadata.NewIndex(ctx.rootFS)
+
+	lg, err := compliance.ReadLicenseGraph(ctx.rootFS, ctx.stderr, files)
+
+	if err != nil {
+		return nil, fmt.Errorf("Unable to read license text file(s) for %q: %v\n", files, err)
+	}
+
+	// Build the license references (LicenseRef-*) for the packages.
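+	// e.g. texts under a/LICENSE and b/LICENSE yield "(LicenseRef-a-LICENSE AND LicenseRef-b-LICENSE)".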
+	licenses := make(map[string]string)
+	concludedLicenses := func(licenseTexts []string) string {
+		licenseRefs := make([]string, 0, len(licenseTexts))
+		for _, licenseText := range licenseTexts {
+			license := strings.SplitN(licenseText, ":", 2)[0]
+			if _, ok := licenses[license]; !ok {
+				licenseRef := "LicenseRef-" + replaceSlashes(license)
+				licenses[license] = licenseRef
+			}
+
+			licenseRefs = append(licenseRefs, licenses[license])
+		}
+		if len(licenseRefs) > 1 {
+			return "(" + strings.Join(licenseRefs, " AND ") + ")"
+		} else if len(licenseRefs) == 1 {
+			return licenseRefs[0]
+		}
+		return "NONE"
+	}
+
+	isMainPackage := true
+	var mainPackage string
+	visitedNodes := make(map[*compliance.TargetNode]struct{})
+
+	// Perform a breadth-first, top-down walk of the license graph, building package information.
+	compliance.WalkTopDownBreadthFirst(nil, lg,
+		func(lg *compliance.LicenseGraph, tn *compliance.TargetNode, path compliance.TargetEdgePath) bool {
+			if err != nil {
+				return false
+			}
+			var pm *projectmetadata.ProjectMetadata
+			pm, err = getProjectMetadata(ctx, pmix, tn)
+			if err != nil {
+				return false
+			}
+
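+			// The first node visited roots the SPDX document; emit the document header exactly once.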
+			if isMainPackage {
+				mainPackage = getDocumentName(ctx, tn, pm)
+				fmt.Fprintf(ctx.stdout, "SPDXVersion: SPDX-2.2\n")
+				fmt.Fprintf(ctx.stdout, "DataLicense: CC0-1.0\n")
+				fmt.Fprintf(ctx.stdout, "DocumentName: %s\n", mainPackage)
+				fmt.Fprintf(ctx.stdout, "SPDXID: SPDXRef-DOCUMENT-%s\n", mainPackage)
+				fmt.Fprintf(ctx.stdout, "DocumentNamespace: Android\n")
+				fmt.Fprintf(ctx.stdout, "Creator: Organization: Google LLC\n")
+				fmt.Fprintf(ctx.stdout, "Created: %s\n", ctx.creationTime().Format("2006-01-02T15:04:05Z"))
+				isMainPackage = false
+			}
+
+			relationships := make([]string, 0, 1)
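+			// Defer printing the relationships so each package's own fields are
+			// emitted first; on panic, re-panic without printing them.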
+			defer func() {
+				if r := recover(); r != nil {
+					panic(r)
+				}
+				for _, relationship := range relationships {
+					fmt.Fprintln(ctx.stdout, relationship)
+				}
+			}()
+			if len(path) == 0 {
+				relationships = append(relationships,
+					fmt.Sprintf("Relationship: SPDXRef-DOCUMENT-%s DESCRIBES SPDXRef-Package-%s",
+						mainPackage, getPackageName(ctx, tn)))
+			} else {
+				// Check parent and identify annotation
+				parent := path[len(path)-1]
+				targetEdge := parent.Edge()
+				if targetEdge.IsRuntimeDependency() {
+					// Add the dynamic-link annotation as a RUNTIME_DEPENDENCY_OF relationship.
+					relationships = append(relationships, fmt.Sprintf("Relationship: SPDXRef-Package-%s RUNTIME_DEPENDENCY_OF SPDXRef-Package-%s", getPackageName(ctx, tn), getPackageName(ctx, targetEdge.Target())))
+
+				} else if targetEdge.IsDerivation() {
+					// Add the derivation annotation as a CONTAINS relationship.
+					relationships = append(relationships, fmt.Sprintf("Relationship: SPDXRef-Package-%s CONTAINS SPDXRef-Package-%s", getPackageName(ctx, targetEdge.Target()), getPackageName(ctx, tn)))
+
+				} else if targetEdge.IsBuildTool() {
+					// Add the toolchain annotation as a BUILD_TOOL_OF relationship.
+					relationships = append(relationships, fmt.Sprintf("Relationship: SPDXRef-Package-%s BUILD_TOOL_OF SPDXRef-Package-%s", getPackageName(ctx, tn), getPackageName(ctx, targetEdge.Target())))
+				} else {
+					panic(fmt.Errorf("Unknown dependency type: %v", targetEdge.Annotations()))
+				}
+			}
+
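+			// Emit each package's fields only once, even when it is reached by multiple paths.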
+			if _, alreadyVisited := visitedNodes[tn]; alreadyVisited {
+				return false
+			}
+			visitedNodes[tn] = struct{}{}
+			pkgName := getPackageName(ctx, tn)
+			fmt.Fprintf(ctx.stdout, "##### Package: %s\n", strings.Replace(pkgName, "-", "/", -1))
+			fmt.Fprintf(ctx.stdout, "PackageName: %s\n", pkgName)
+			if pm != nil && pm.Version() != "" {
+				fmt.Fprintf(ctx.stdout, "PackageVersion: %s\n", pm.Version())
+			}
+			fmt.Fprintf(ctx.stdout, "SPDXID: SPDXRef-Package-%s\n", pkgName)
+			fmt.Fprintf(ctx.stdout, "PackageDownloadLocation: %s\n", getDownloadUrl(ctx, pm))
+			fmt.Fprintf(ctx.stdout, "PackageLicenseConcluded: %s\n", concludedLicenses(tn.LicenseTexts()))
+			return true
+		})
+
+	fmt.Fprintf(ctx.stdout, "##### Non-standard license:\n")
+
+	licenseTexts := make([]string, 0, len(licenses))
+
+	for licenseText := range licenses {
+		licenseTexts = append(licenseTexts, licenseText)
+	}
+
+	sort.Strings(licenseTexts)
+
+	for _, licenseText := range licenseTexts {
+		fmt.Fprintf(ctx.stdout, "LicenseID: %s\n", licenses[licenseText])
+		// open the file
+		f, err := ctx.rootFS.Open(filepath.Clean(licenseText))
+		if err != nil {
+			return nil, fmt.Errorf("error opening license text file %q: %w", licenseText, err)
+		}
+
+		// read the file
+		text, err := io.ReadAll(f)
+		if err != nil {
+			return nil, fmt.Errorf("error reading license text file %q: %w", licenseText, err)
+		}
+		// Emit the extracted license text inline.
+		fmt.Fprintf(ctx.stdout, "ExtractedText: <text>%v</text>\n", string(text))
+	}
+
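+	// The license text files read above double as the dependency list for the -d deps file.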
+	deps := licenseTexts
+	return deps, nil
+}
diff --git a/tools/compliance/cmd/sbom/sbom_test.go b/tools/compliance/cmd/sbom/sbom_test.go
new file mode 100644
index 0000000..6b40a27
--- /dev/null
+++ b/tools/compliance/cmd/sbom/sbom_test.go
@@ -0,0 +1,1629 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bufio"
+	"bytes"
+	"fmt"
+	"os"
+	"reflect"
+	"regexp"
+	"strings"
+	"testing"
+	"time"
+
+	"android/soong/tools/compliance"
+)
+
+var (
+	spdxVersionTag              = regexp.MustCompile(`^\s*SPDXVersion: SPDX-2.2\s*$`)
+	spdxDataLicenseTag          = regexp.MustCompile(`^\s*DataLicense: CC0-1.0\s*$`)
+	spdxDocumentNameTag         = regexp.MustCompile(`^\s*DocumentName:\s*Android*\s*$`)
+	spdxIDTag                   = regexp.MustCompile(`^\s*SPDXID:\s*SPDXRef-DOCUMENT-(.*)\s*$`)
+	spdxDocumentNameSpaceTag    = regexp.MustCompile(`^\s*DocumentNamespace:\s*Android\s*$`)
+	spdxCreatorOrganizationTag  = regexp.MustCompile(`^\s*Creator:\s*Organization:\s*Google LLC\s*$`)
+	spdxCreatedTimeTag          = regexp.MustCompile(`^\s*Created: 1970-01-01T00:00:00Z\s*$`)
+	spdxPackageTag              = regexp.MustCompile(`^\s*#####\s*Package:\s*(.*)\s*$`)
+	spdxPackageNameTag          = regexp.MustCompile(`^\s*PackageName:\s*(.*)\s*$`)
+	spdxPkgIDTag                = regexp.MustCompile(`^\s*SPDXID:\s*SPDXRef-Package-(.*)\s*$`)
+	spdxPkgDownloadLocationTag  = regexp.MustCompile(`^\s*PackageDownloadLocation:\s*NOASSERTION\s*$`)
+	spdxPkgLicenseDeclaredTag   = regexp.MustCompile(`^\s*PackageLicenseConcluded:\s*LicenseRef-(.*)\s*$`)
+	spdxRelationshipTag         = regexp.MustCompile(`^\s*Relationship:\s*SPDXRef-(.*)\s*(DESCRIBES|CONTAINS|BUILD_TOOL_OF|RUNTIME_DEPENDENCY_OF)\s*SPDXRef-Package-(.*)\s*$`)
+	spdxLicenseTag              = regexp.MustCompile(`^\s*##### Non-standard license:\s*$`)
+	spdxLicenseIDTag            = regexp.MustCompile(`^\s*LicenseID: LicenseRef-(.*)\s*$`)
+	spdxExtractedTextTag        = regexp.MustCompile(`^\s*ExtractedText:\s*<text>(.*)\s*$`)
+	spdxExtractedClosingTextTag = regexp.MustCompile(`^\s*</text>\s*$`)
+)
+
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to testdata directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
+func Test(t *testing.T) {
+	tests := []struct {
+		condition    string
+		name         string
+		outDir       string
+		roots        []string
+		stripPrefix  string
+		expectedOut  []matcher
+		expectedDeps []string
+	}{
+		{
+			condition: "firstparty",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/firstparty/highest.apex.meta_lic"},
+				packageName{"testdata/firstparty/highest.apex.meta_lic"},
+				spdxPkgID{"testdata/firstparty/highest.apex.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata-firstparty-highest.apex.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/firstparty/bin/bin1.meta_lic"},
+				packageName{"testdata/firstparty/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/firstparty/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata/firstparty/bin/bin1.meta_lic", "CONTAINS"},
+				packageTag{"testdata/firstparty/bin/bin2.meta_lic"},
+				packageName{"testdata/firstparty/bin/bin2.meta_lic"},
+				spdxPkgID{"testdata/firstparty/bin/bin2.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata-firstparty-bin-bin2.meta_lic", "CONTAINS"},
+				packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
+				packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/firstparty/lib/libb.so.meta_lic"},
+				packageName{"testdata/firstparty/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata/firstparty/lib/libb.so.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/firstparty/lib/libc.a.meta_lic"},
+				packageName{"testdata/firstparty/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata-firstparty-bin-bin1.meta_lic ", "testdata/firstparty/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/firstparty/lib/libb.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				packageTag{"testdata/firstparty/lib/libd.so.meta_lic"},
+				packageName{"testdata/firstparty/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/lib/libd.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/firstparty/application.meta_lic"},
+				packageName{"testdata/firstparty/application.meta_lic"},
+				spdxPkgID{"testdata/firstparty/application.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/firstparty/application.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/firstparty/bin/bin3.meta_lic"},
+				packageName{"testdata/firstparty/bin/bin3.meta_lic"},
+				spdxPkgID{"testdata/firstparty/bin/bin3.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/bin/bin3.meta_lic ", "testdata-firstparty-application.meta_lic", "BUILD_TOOL_OF"},
+				packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
+				packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/application.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/firstparty/lib/libb.so.meta_lic"},
+				packageName{"testdata/firstparty/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/lib/libb.so.meta_lic ", "testdata-firstparty-application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/firstparty/container.zip.meta_lic"},
+				packageName{"testdata/firstparty/container.zip.meta_lic"},
+				spdxPkgID{"testdata/firstparty/container.zip.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/firstparty/container.zip.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/firstparty/bin/bin1.meta_lic"},
+				packageName{"testdata/firstparty/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/firstparty/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/bin/bin1.meta_lic", "CONTAINS"},
+				packageTag{"testdata/firstparty/bin/bin2.meta_lic"},
+				packageName{"testdata/firstparty/bin/bin2.meta_lic"},
+				spdxPkgID{"testdata/firstparty/bin/bin2.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "CONTAINS"},
+				packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
+				packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/firstparty/lib/libb.so.meta_lic"},
+				packageName{"testdata/firstparty/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/lib/libb.so.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/firstparty/lib/libc.a.meta_lic"},
+				packageName{"testdata/firstparty/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/firstparty/lib/libb.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				packageTag{"testdata/firstparty/lib/libd.so.meta_lic"},
+				packageName{"testdata/firstparty/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/lib/libd.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/firstparty/bin/bin1.meta_lic"},
+				packageName{"testdata/firstparty/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/firstparty/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/firstparty/bin/bin1.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
+				packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/firstparty/lib/libc.a.meta_lic"},
+				packageName{"testdata/firstparty/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "firstparty",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/firstparty/lib/libd.so.meta_lic"},
+				packageName{"testdata/firstparty/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/firstparty/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/firstparty/lib/libd.so.meta_lic", "DESCRIBES"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+		},
+		{
+			condition: "notice",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/notice/highest.apex.meta_lic"},
+				packageName{"testdata/notice/highest.apex.meta_lic"},
+				spdxPkgID{"testdata/notice/highest.apex.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/notice/highest.apex.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/notice/bin/bin1.meta_lic"},
+				packageName{"testdata/notice/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/notice/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/bin/bin1.meta_lic", "CONTAINS"},
+				packageTag{"testdata/notice/bin/bin2.meta_lic"},
+				packageName{"testdata/notice/bin/bin2.meta_lic"},
+				spdxPkgID{"testdata/notice/bin/bin2.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "CONTAINS"},
+				packageTag{"testdata/notice/lib/liba.so.meta_lic"},
+				packageName{"testdata/notice/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/notice/lib/libb.so.meta_lic"},
+				packageName{"testdata/notice/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/lib/libb.so.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/notice/lib/libc.a.meta_lic"},
+				packageName{"testdata/notice/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/notice/lib/libb.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				packageTag{"testdata/notice/lib/libd.so.meta_lic"},
+				packageName{"testdata/notice/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/lib/libd.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/notice/container.zip.meta_lic"},
+				packageName{"testdata/notice/container.zip.meta_lic"},
+				spdxPkgID{"testdata/notice/container.zip.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/notice/container.zip.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/notice/bin/bin1.meta_lic"},
+				packageName{"testdata/notice/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/notice/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/bin/bin1.meta_lic", "CONTAINS"},
+				packageTag{"testdata/notice/bin/bin2.meta_lic"},
+				packageName{"testdata/notice/bin/bin2.meta_lic"},
+				spdxPkgID{"testdata/notice/bin/bin2.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "CONTAINS"},
+				packageTag{"testdata/notice/lib/liba.so.meta_lic"},
+				packageName{"testdata/notice/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/notice/lib/libb.so.meta_lic"},
+				packageName{"testdata/notice/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/lib/libb.so.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/notice/lib/libc.a.meta_lic"},
+				packageName{"testdata/notice/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/notice/lib/libb.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				packageTag{"testdata/notice/lib/libd.so.meta_lic"},
+				packageName{"testdata/notice/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/lib/libd.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/notice/application.meta_lic"},
+				packageName{"testdata/notice/application.meta_lic"},
+				spdxPkgID{"testdata/notice/application.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata-notice-application.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/notice/bin/bin3.meta_lic"},
+				packageName{"testdata/notice/bin/bin3.meta_lic"},
+				spdxPkgID{"testdata/notice/bin/bin3.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata-notice-bin-bin3.meta_lic ", "testdata/notice/application.meta_lic", "BUILD_TOOL_OF"},
+				packageTag{"testdata/notice/lib/liba.so.meta_lic"},
+				packageName{"testdata/notice/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/application.meta_lic ", "testdata-notice-lib-liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/notice/lib/libb.so.meta_lic"},
+				packageName{"testdata/notice/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata-notice-lib-libb.so.meta_lic ", "testdata/notice/application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/notice/bin/bin1.meta_lic"},
+				packageName{"testdata/notice/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/notice/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/notice/bin/bin1.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/notice/lib/liba.so.meta_lic"},
+				packageName{"testdata/notice/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/notice/lib/libc.a.meta_lic"},
+				packageName{"testdata/notice/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/notice/lib/libd.so.meta_lic"},
+				packageName{"testdata/notice/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/notice/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/notice/lib/libd.so.meta_lic", "DESCRIBES"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+		{
+			condition: "reciprocal",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/reciprocal/highest.apex.meta_lic"},
+				packageName{"testdata/reciprocal/highest.apex.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/highest.apex.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/reciprocal/highest.apex.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/reciprocal/bin/bin1.meta_lic"},
+				packageName{"testdata/reciprocal/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata-reciprocal-bin-bin1.meta_lic", "CONTAINS"},
+				packageTag{"testdata/reciprocal/bin/bin2.meta_lic"},
+				packageName{"testdata/reciprocal/bin/bin2.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/bin/bin2.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata-reciprocal-bin-bin2.meta_lic", "CONTAINS"},
+				packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/reciprocal/lib/libb.so.meta_lic"},
+				packageName{"testdata/reciprocal/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata/reciprocal/lib/libb.so.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/reciprocal/lib/libc.a.meta_lic"},
+				packageName{"testdata/reciprocal/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/reciprocal/lib/libb.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				packageTag{"testdata/reciprocal/lib/libd.so.meta_lic"},
+				packageName{"testdata/reciprocal/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/lib/libd.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxExtractedText{"$$$Reciprocal License$$$"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/reciprocal/container.zip.meta_lic"},
+				packageName{"testdata/reciprocal/container.zip.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/container.zip.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/reciprocal/container.zip.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/reciprocal/bin/bin1.meta_lic"},
+				packageName{"testdata/reciprocal/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata-reciprocal-bin-bin1.meta_lic", "CONTAINS"},
+				packageTag{"testdata/reciprocal/bin/bin2.meta_lic"},
+				packageName{"testdata/reciprocal/bin/bin2.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/bin/bin2.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata-reciprocal-bin-bin2.meta_lic", "CONTAINS"},
+				packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/reciprocal/lib/libb.so.meta_lic"},
+				packageName{"testdata/reciprocal/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata/reciprocal/lib/libb.so.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/reciprocal/lib/libc.a.meta_lic"},
+				packageName{"testdata/reciprocal/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/reciprocal/lib/libb.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				packageTag{"testdata/reciprocal/lib/libd.so.meta_lic"},
+				packageName{"testdata/reciprocal/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/lib/libd.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxExtractedText{"$$$Reciprocal License$$$"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/reciprocal/application.meta_lic"},
+				packageName{"testdata/reciprocal/application.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/application.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/reciprocal/application.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/reciprocal/bin/bin3.meta_lic"},
+				packageName{"testdata/reciprocal/bin/bin3.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/bin/bin3.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata-reciprocal-bin-bin3.meta_lic ", "testdata/reciprocal/application.meta_lic", "BUILD_TOOL_OF"},
+				packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/application.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/reciprocal/lib/libb.so.meta_lic"},
+				packageName{"testdata/reciprocal/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/lib/libb.so.meta_lic ", "testdata/reciprocal/application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxExtractedText{"$$$Reciprocal License$$$"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/reciprocal/bin/bin1.meta_lic"},
+				packageName{"testdata/reciprocal/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/reciprocal/bin/bin1.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/reciprocal/lib/libc.a.meta_lic"},
+				packageName{"testdata/reciprocal/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxExtractedText{"$$$Reciprocal License$$$"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/reciprocal/lib/libd.so.meta_lic"},
+				packageName{"testdata/reciprocal/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/reciprocal/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/reciprocal/lib/libd.so.meta_lic", "DESCRIBES"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+			},
+		},
+		{
+			condition:   "restricted",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/apex/",
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/restricted/highest.apex.meta_lic"},
+				packageName{"testdata/restricted/highest.apex.meta_lic"},
+				spdxPkgID{"testdata/restricted/highest.apex.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/restricted/highest.apex.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/restricted/bin/bin1.meta_lic"},
+				packageName{"testdata/restricted/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/restricted/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/bin/bin1.meta_lic", "CONTAINS"},
+				packageTag{"testdata/restricted/bin/bin2.meta_lic"},
+				packageName{"testdata/restricted/bin/bin2.meta_lic"},
+				spdxPkgID{"testdata/restricted/bin/bin2.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "CONTAINS"},
+				packageTag{"testdata/restricted/lib/liba.so.meta_lic"},
+				packageName{"testdata/restricted/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/restricted/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/restricted/lib/libb.so.meta_lic"},
+				packageName{"testdata/restricted/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/restricted/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/lib/libb.so.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/restricted/lib/libc.a.meta_lic"},
+				packageName{"testdata/restricted/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/restricted/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/restricted/lib/libb.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				packageTag{"testdata/restricted/lib/libd.so.meta_lic"},
+				packageName{"testdata/restricted/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/restricted/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/lib/libd.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxExtractedText{"$$$Reciprocal License$$$"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxExtractedText{"###Restricted License###"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition:   "restricted",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
+			stripPrefix: "out/target/product/fictional/system/apex/",
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/restricted/container.zip.meta_lic"},
+				packageName{"testdata/restricted/container.zip.meta_lic"},
+				spdxPkgID{"testdata/restricted/container.zip.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/restricted/container.zip.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/restricted/bin/bin1.meta_lic"},
+				packageName{"testdata/restricted/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/restricted/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/bin/bin1.meta_lic", "CONTAINS"},
+				packageTag{"testdata/restricted/bin/bin2.meta_lic"},
+				packageName{"testdata/restricted/bin/bin2.meta_lic"},
+				spdxPkgID{"testdata/restricted/bin/bin2.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "CONTAINS"},
+				packageTag{"testdata/restricted/lib/liba.so.meta_lic"},
+				packageName{"testdata/restricted/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/restricted/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/restricted/lib/libb.so.meta_lic"},
+				packageName{"testdata/restricted/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/restricted/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/lib/libb.so.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/restricted/lib/libc.a.meta_lic"},
+				packageName{"testdata/restricted/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/restricted/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/restricted/lib/libb.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				packageTag{"testdata/restricted/lib/libd.so.meta_lic"},
+				packageName{"testdata/restricted/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/restricted/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/lib/libd.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxExtractedText{"$$$Reciprocal License$$$"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxExtractedText{"###Restricted License###"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/restricted/bin/bin1.meta_lic"},
+				packageName{"testdata/restricted/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/restricted/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/restricted/bin/bin1.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/restricted/lib/liba.so.meta_lic"},
+				packageName{"testdata/restricted/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/restricted/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/restricted/lib/libc.a.meta_lic"},
+				packageName{"testdata/restricted/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/restricted/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
+				spdxExtractedText{"$$$Reciprocal License$$$"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxExtractedText{"###Restricted License###"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/restricted/lib/libd.so.meta_lic"},
+				packageName{"testdata/restricted/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/restricted/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/restricted/lib/libd.so.meta_lic", "DESCRIBES"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+		{
+			condition: "proprietary",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/proprietary/highest.apex.meta_lic"},
+				packageName{"testdata/proprietary/highest.apex.meta_lic"},
+				spdxPkgID{"testdata/proprietary/highest.apex.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/proprietary/highest.apex.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/proprietary/bin/bin1.meta_lic"},
+				packageName{"testdata/proprietary/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/proprietary/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/bin/bin1.meta_lic", "CONTAINS"},
+				packageTag{"testdata/proprietary/bin/bin2.meta_lic"},
+				packageName{"testdata/proprietary/bin/bin2.meta_lic"},
+				spdxPkgID{"testdata/proprietary/bin/bin2.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "CONTAINS"},
+				packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
+				packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/proprietary/lib/libb.so.meta_lic"},
+				packageName{"testdata/proprietary/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/lib/libb.so.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/proprietary/lib/libc.a.meta_lic"},
+				packageName{"testdata/proprietary/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/proprietary/lib/libb.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				packageTag{"testdata/proprietary/lib/libd.so.meta_lic"},
+				packageName{"testdata/proprietary/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/lib/libd.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxExtractedText{"@@@Proprietary License@@@"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxExtractedText{"###Restricted License###"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/proprietary/container.zip.meta_lic"},
+				packageName{"testdata/proprietary/container.zip.meta_lic"},
+				spdxPkgID{"testdata/proprietary/container.zip.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/proprietary/container.zip.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/proprietary/bin/bin1.meta_lic"},
+				packageName{"testdata/proprietary/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/proprietary/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/bin/bin1.meta_lic", "CONTAINS"},
+				packageTag{"testdata/proprietary/bin/bin2.meta_lic"},
+				packageName{"testdata/proprietary/bin/bin2.meta_lic"},
+				spdxPkgID{"testdata/proprietary/bin/bin2.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "CONTAINS"},
+				packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
+				packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/proprietary/lib/libb.so.meta_lic"},
+				packageName{"testdata/proprietary/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/lib/libb.so.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/proprietary/lib/libc.a.meta_lic"},
+				packageName{"testdata/proprietary/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxRelationship{"Package-testdata/proprietary/lib/libb.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				packageTag{"testdata/proprietary/lib/libd.so.meta_lic"},
+				packageName{"testdata/proprietary/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/lib/libd.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxExtractedText{"@@@Proprietary License@@@"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxExtractedText{"###Restricted License###"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/proprietary/application.meta_lic"},
+				packageName{"testdata/proprietary/application.meta_lic"},
+				spdxPkgID{"testdata/proprietary/application.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/proprietary/application.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/proprietary/bin/bin3.meta_lic"},
+				packageName{"testdata/proprietary/bin/bin3.meta_lic"},
+				spdxPkgID{"testdata/proprietary/bin/bin3.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/bin/bin3.meta_lic ", "testdata/proprietary/application.meta_lic", "BUILD_TOOL_OF"},
+				packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
+				packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/application.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/proprietary/lib/libb.so.meta_lic"},
+				packageName{"testdata/proprietary/lib/libb.so.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/libb.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/lib/libb.so.meta_lic ", "testdata/proprietary/application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxExtractedText{"@@@Proprietary License@@@"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
+				spdxExtractedText{"###Restricted License###"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/proprietary/bin/bin1.meta_lic"},
+				packageName{"testdata/proprietary/bin/bin1.meta_lic"},
+				spdxPkgID{"testdata/proprietary/bin/bin1.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/proprietary/bin/bin1.meta_lic", "DESCRIBES"},
+				packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
+				packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
+				packageTag{"testdata/proprietary/lib/libc.a.meta_lic"},
+				packageName{"testdata/proprietary/lib/libc.a.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/libc.a.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/libc.a.meta_lic", "CONTAINS"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
+				spdxExtractedText{"&&&First Party License&&&"},
+				spdxExtractedClosingText{},
+				spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
+				spdxExtractedText{"@@@Proprietary License@@@"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: []matcher{
+				spdxVersion{},
+				spdxDataLicense{},
+				spdxDocumentName{"Android"},
+				spdxID{"Android"},
+				spdxDocumentNameSpace{},
+				spdxCreatorOrganization{},
+				spdxCreatedTime{},
+				packageTag{"testdata/proprietary/lib/libd.so.meta_lic"},
+				packageName{"testdata/proprietary/lib/libd.so.meta_lic"},
+				spdxPkgID{"testdata/proprietary/lib/libd.so.meta_lic"},
+				spdxPkgDownloadLocation{"NOASSERTION"},
+				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
+				spdxRelationship{"DOCUMENT-Android ", "testdata/proprietary/lib/libd.so.meta_lic", "DESCRIBES"},
+				spdxLicense{},
+				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
+				spdxExtractedText{"%%%Notice License%%%"},
+				spdxExtractedClosingText{},
+			},
+			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.condition+" "+tt.name, func(t *testing.T) {
+			stdout := &bytes.Buffer{}
+			stderr := &bytes.Buffer{}
+
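+			// Expand each root into its full testdata path for this license condition.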
+			rootFiles := make([]string, 0, len(tt.roots))
+			for _, r := range tt.roots {
+				rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
+			}
+
+			ctx := context{stdout, stderr, compliance.GetFS(tt.outDir), "Android", []string{tt.stripPrefix}, fakeTime}
+
+			deps, err := sbomGenerator(&ctx, rootFiles...)
+			if err != nil {
+				t.Fatalf("sbom: error = %v, stderr = %v", err, stderr)
+			}
+			if stderr.Len() > 0 {
+				t.Errorf("sbom: gotStderr = %v, want none", stderr)
+			}
+
+			t.Logf("got stdout: %s", stdout.String())
+
+			t.Logf("want stdout: %s", matcherList(tt.expectedOut).String())
+
+			out := bufio.NewScanner(stdout)
+			lineno := 0
+			for out.Scan() {
+				line := out.Text()
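+				// Skip blank lines; expectedOut describes only the content lines.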
+				if strings.TrimLeft(line, " ") == "" {
+					continue
+				}
+				if len(tt.expectedOut) <= lineno {
+					t.Errorf("sbom: unexpected output at line %d: got %q, want nothing (wanted %d lines)", lineno+1, line, len(tt.expectedOut))
+				} else if !tt.expectedOut[lineno].isMatch(line) {
+					t.Errorf("sbom: unexpected output at line %d: got %q, want %q", lineno+1, line, tt.expectedOut[lineno])
+				}
+				lineno++
+			}
+			for ; lineno < len(tt.expectedOut); lineno++ {
+				t.Errorf("sbom: missing output line %d: ended early, want %q", lineno+1, tt.expectedOut[lineno])
+			}
+
+			t.Logf("got deps: %q", deps)
+
+			t.Logf("want deps: %q", tt.expectedDeps)
+
+			if g, w := deps, tt.expectedDeps; !reflect.DeepEqual(g, w) {
+				t.Errorf("unexpected deps, wanted:\n%s\ngot:\n%s\n",
+					strings.Join(w, "\n"), strings.Join(g, "\n"))
+			}
+		})
+	}
+}
+
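+// matcher checks one line of generated SPDX output; String renders the
+// expected form of that line for test logs.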
+type matcher interface {
+	isMatch(line string) bool
+	String() string
+}
+
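+// packageTag matches the "##### Package:" comment line that opens each package stanza.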
+type packageTag struct {
+	name string
+}
+
+func (m packageTag) isMatch(line string) bool {
+	groups := spdxPackageTag.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == m.name
+}
+
+func (m packageTag) String() string {
+	return "##### Package: " + m.name
+}
+
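+// packageName matches a PackageName tag against the expected name after replaceSlashes.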
+type packageName struct {
+	name string
+}
+
+func (m packageName) isMatch(line string) bool {
+	groups := spdxPackageNameTag.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == replaceSlashes(m.name)
+}
+
+func (m packageName) String() string {
+	return "PackageName: " + replaceSlashes(m.name)
+}
+
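+// spdxID matches the SPDXID tag identifying the document.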
+type spdxID struct {
+	name string
+}
+
+func (m spdxID) isMatch(line string) bool {
+	groups := spdxIDTag.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == replaceSlashes(m.name)
+}
+
+func (m spdxID) String() string {
+	return "SPDXID: SPDXRef-DOCUMENT-" + replaceSlashes(m.name)
+}
+
+type spdxPkgID struct {
+	name string
+}
+
+func (m spdxPkgID) isMatch(line string) bool {
+	groups := spdxPkgIDTag.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == replaceSlashes(m.name)
+}
+
+func (m spdxPkgID) String() string {
+	return "SPDXID: SPDXRef-Package-" + replaceSlashes(m.name)
+}
+
+type spdxVersion struct{}
+
+func (m spdxVersion) isMatch(line string) bool {
+	return spdxVersionTag.MatchString(line)
+}
+
+func (m spdxVersion) String() string {
+	return "SPDXVersion: SPDX-2.2"
+}
+
+type spdxDataLicense struct{}
+
+func (m spdxDataLicense) isMatch(line string) bool {
+	return spdxDataLicenseTag.MatchString(line)
+}
+
+func (m spdxDataLicense) String() string {
+	return "DataLicense: CC0-1.0"
+}
+
+type spdxDocumentName struct {
+	name string
+}
+
+func (m spdxDocumentName) isMatch(line string) bool {
+	return spdxDocumentNameTag.MatchString(line)
+}
+
+func (m spdxDocumentName) String() string {
+	return "DocumentName: " + m.name
+}
+
+type spdxDocumentNameSpace struct {
+	name string
+}
+
+func (m spdxDocumentNameSpace) isMatch(line string) bool {
+	return spdxDocumentNameSpaceTag.MatchString(line)
+}
+
+func (m spdxDocumentNameSpace) String() string {
+	return "DocumentNameSpace: Android"
+}
+
+type spdxCreatorOrganization struct{}
+
+func (m spdxCreatorOrganization) isMatch(line string) bool {
+	return spdxCreatorOrganizationTag.MatchString(line)
+}
+
+func (m spdxCreatorOrganization) String() string {
+	return "Creator: Organization: Google LLC"
+}
+
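+// fakeTime pins the SBOM creation time to the Unix epoch so the Created tag is deterministic.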
+func fakeTime() time.Time {
+	return time.UnixMicro(0).UTC()
+}
+
+type spdxCreatedTime struct{}
+
+func (m spdxCreatedTime) isMatch(line string) bool {
+	return spdxCreatedTimeTag.MatchString(line)
+}
+
+func (m spdxCreatedTime) String() string {
+	return "Created: 1970-01-01T00:00:00Z"
+}
+
+type spdxPkgDownloadLocation struct {
+	name string
+}
+
+func (m spdxPkgDownloadLocation) isMatch(line string) bool {
+	return spdxPkgDownloadLocationTag.MatchString(line)
+}
+
+func (m spdxPkgDownloadLocation) String() string {
+	return "PackageDownloadLocation: " + m.name
+}
+
+type spdxPkgLicenseDeclared struct {
+	name string
+}
+
+func (m spdxPkgLicenseDeclared) isMatch(line string) bool {
+	groups := spdxPkgLicenseDeclaredTag.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == replaceSlashes(m.name)
+}
+
+func (m spdxPkgLicenseDeclared) String() string {
+	return "PackageLicenseDeclared: LicenseRef-" + m.name
+}
+
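+// spdxRelationship matches a Relationship tag; pkg1 and pkg2 name the related
+// SPDX elements and relation gives the relationship type, e.g. CONTAINS.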
+type spdxRelationship struct {
+	pkg1     string
+	pkg2     string
+	relation string
+}
+
+func (m spdxRelationship) isMatch(line string) bool {
+	groups := spdxRelationshipTag.FindStringSubmatch(line)
+	if len(groups) != 4 {
+		return false
+	}
+	return groups[1] == replaceSlashes(m.pkg1) && groups[2] == m.relation && groups[3] == replaceSlashes(m.pkg2)
+}
+
+func (m spdxRelationship) String() string {
+	return "Relationship: SPDXRef-" + replaceSlashes(m.pkg1) + " " + m.relation + " SPDXRef-Package-" + replaceSlashes(m.pkg2)
+}
+
+type spdxLicense struct{}
+
+func (m spdxLicense) isMatch(line string) bool {
+	return spdxLicenseTag.MatchString(line)
+}
+
+func (m spdxLicense) String() string {
+	return "##### Non-standard license:"
+}
+
+type spdxLicenseID struct {
+	name string
+}
+
+func (m spdxLicenseID) isMatch(line string) bool {
+	groups := spdxLicenseIDTag.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == replaceSlashes(m.name)
+}
+
+func (m spdxLicenseID) String() string {
+	return "LicenseID: LicenseRef-" + m.name
+}
+
+type spdxExtractedText struct {
+	name string
+}
+
+func (m spdxExtractedText) isMatch(line string) bool {
+	groups := spdxExtractedTextTag.FindStringSubmatch(line)
+	if len(groups) != 2 {
+		return false
+	}
+	return groups[1] == replaceSlashes(m.name)
+}
+
+func (m spdxExtractedText) String() string {
+	return "ExtractedText: <text>" + m.name
+}
+
+type spdxExtractedClosingText struct{}
+
+func (m spdxExtractedClosingText) isMatch(line string) bool {
+	return spdxExtractedClosingTextTag.MatchString(line)
+}
+
+func (m spdxExtractedClosingText) String() string {
+	return "</text>"
+}
+
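+// matcherList renders a list of matchers, one expected line each, for test logging.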
+type matcherList []matcher
+
+func (l matcherList) String() string {
+	var sb strings.Builder
+	for _, m := range l {
+		s := m.String()
+		fmt.Fprintf(&sb, "%s\n", s)
+	}
+	return sb.String()
+}
diff --git a/tools/compliance/cmd/shippedlibs/shippedlibs.go b/tools/compliance/cmd/shippedlibs/shippedlibs.go
index 9d25dd3..add6dd6 100644
--- a/tools/compliance/cmd/shippedlibs/shippedlibs.go
+++ b/tools/compliance/cmd/shippedlibs/shippedlibs.go
@@ -39,9 +39,6 @@
 	rootFS fs.FS
 }
 
-func init() {
-}
-
 func main() {
 	var expandedArgs []string
 	for _, arg := range os.Args[1:] {
diff --git a/tools/compliance/cmd/testdata/firstparty/METADATA b/tools/compliance/cmd/testdata/firstparty/METADATA
new file mode 100644
index 0000000..62b4481
--- /dev/null
+++ b/tools/compliance/cmd/testdata/firstparty/METADATA
@@ -0,0 +1,6 @@
+# Comments are allowed
+name: "1ptd"
+description: "First Party Test Data"
+third_party {
+    version: "1.0"
+}
diff --git a/tools/compliance/cmd/testdata/notice/METADATA b/tools/compliance/cmd/testdata/notice/METADATA
new file mode 100644
index 0000000..302dfeb
--- /dev/null
+++ b/tools/compliance/cmd/testdata/notice/METADATA
@@ -0,0 +1,6 @@
+# Comments are allowed
+name: "noticetd"
+description: "Notice Test Data"
+third_party {
+    version: "1.0"
+}
diff --git a/tools/compliance/cmd/testdata/proprietary/METADATA b/tools/compliance/cmd/testdata/proprietary/METADATA
new file mode 100644
index 0000000..72cc54a
--- /dev/null
+++ b/tools/compliance/cmd/testdata/proprietary/METADATA
@@ -0,0 +1 @@
+# comments are allowed
diff --git a/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
index 7ef14e9..a7c3d01 100644
--- a/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
@@ -2,7 +2,7 @@
 module_classes: "EXECUTABLES"
 projects:  "standalone/binary"
 license_kinds:  "SPDX-license-identifier-LGPL-2.0"
-license_conditions:  "restricted"
+license_conditions:  "restricted_allows_dynamic_linking"
 license_texts:  "testdata/restricted/RESTRICTED_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
diff --git a/tools/compliance/cmd/testdata/reciprocal/METADATA b/tools/compliance/cmd/testdata/reciprocal/METADATA
new file mode 100644
index 0000000..50cc2ef
--- /dev/null
+++ b/tools/compliance/cmd/testdata/reciprocal/METADATA
@@ -0,0 +1,5 @@
+# Comments are allowed
+description: "Reciprocal Test Data"
+third_party {
+    version: "1.0"
+}
diff --git a/tools/compliance/cmd/testdata/restricted/METADATA b/tools/compliance/cmd/testdata/restricted/METADATA
new file mode 100644
index 0000000..6bcf83f
--- /dev/null
+++ b/tools/compliance/cmd/testdata/restricted/METADATA
@@ -0,0 +1,6 @@
+name {
+    id: 1
+}
+third_party {
+    version: 2
+}
diff --git a/tools/compliance/cmd/testdata/restricted/METADATA.android b/tools/compliance/cmd/testdata/restricted/METADATA.android
new file mode 100644
index 0000000..1142499
--- /dev/null
+++ b/tools/compliance/cmd/testdata/restricted/METADATA.android
@@ -0,0 +1,6 @@
+# Comments are allowed
+name: "testdata"
+description: "Restricted Test Data"
+third_party {
+    version: "1.0"
+}
diff --git a/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
index 7ef14e9..a7c3d01 100644
--- a/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
@@ -2,7 +2,7 @@
 module_classes: "EXECUTABLES"
 projects:  "standalone/binary"
 license_kinds:  "SPDX-license-identifier-LGPL-2.0"
-license_conditions:  "restricted"
+license_conditions:  "restricted_allows_dynamic_linking"
 license_texts:  "testdata/restricted/RESTRICTED_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
diff --git a/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic b/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
index a505d4a..101ca19 100644
--- a/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
@@ -1,7 +1,7 @@
 package_name:  "Device"
 projects:  "device/library"
 license_kinds:  "SPDX-license-identifier-LGPL-2.0"
-license_conditions:  "restricted"
+license_conditions:  "restricted_allows_dynamic_linking"
 license_texts:  "testdata/restricted/RESTRICTED_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.so"
diff --git a/tools/compliance/cmd/textnotice/textnotice.go b/tools/compliance/cmd/textnotice/textnotice.go
index cfa0859..450290c 100644
--- a/tools/compliance/cmd/textnotice/textnotice.go
+++ b/tools/compliance/cmd/textnotice/textnotice.go
@@ -23,20 +23,16 @@
 	"io/fs"
 	"os"
 	"path/filepath"
+	"sort"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 
 	"github.com/google/blueprint/deptools"
 )
 
 var (
-	outputFile  = flag.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
-	depsFile    = flag.String("d", "", "Where to write the deps file")
-	product     = flag.String("product", "", "The name of the product for which the notice is generated.")
-	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-	title       = flag.String("title", "", "The title of the notice file.")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -67,22 +63,10 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs a text NOTICE file.
-
-Options:
-`, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
-	}
-}
-
 // newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
 	var f multiString
-	flag.Var(&f, name, usage)
+	flags.Var(&f, name, usage)
 	return &f
 }
 
@@ -93,16 +77,55 @@
 func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
 
 func main() {
-	flag.Parse()
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs a text NOTICE file.
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the NOTICE text file. (default stdout)")
+	depsFile := flags.String("d", "", "Where to write the deps file")
+	product := flags.String("product", "", "The name of the product for which the notice is generated.")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+	title := flags.String("title", "", "The title of the notice file.")
+
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
 	if len(*outputFile) == 0 {
-		flag.Usage()
+		flags.Usage()
 		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
 		os.Exit(2)
 	} else {
@@ -139,10 +162,10 @@
 
 	ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, *title, &deps}
 
-	err := textNotice(ctx, flag.Args()...)
+	err := textNotice(ctx, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
@@ -208,7 +231,8 @@
 		fmt.Fprintln(ctx.stdout)
 	}
 
-	*ctx.deps = ni.InputNoticeFiles()
+	*ctx.deps = ni.InputFiles()
+	sort.Strings(*ctx.deps)
 
 	return nil
 }
diff --git a/tools/compliance/cmd/textnotice/textnotice_test.go b/tools/compliance/cmd/textnotice/textnotice_test.go
index e661a44..a902313 100644
--- a/tools/compliance/cmd/textnotice/textnotice_test.go
+++ b/tools/compliance/cmd/textnotice/textnotice_test.go
@@ -65,7 +65,16 @@
 				usedBy{"highest.apex/lib/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -81,7 +90,16 @@
 				usedBy{"container.zip/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/container.zip.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -93,7 +111,13 @@
 				usedBy{"application"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/application.meta_lic",
+				"testdata/firstparty/bin/bin3.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -105,7 +129,12 @@
 				usedBy{"bin/bin1"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -117,7 +146,10 @@
 				usedBy{"lib/libd.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "notice",
@@ -142,6 +174,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/highest.apex.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -167,6 +206,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/container.zip.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -186,6 +232,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/application.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -207,6 +257,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -219,7 +272,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "reciprocal",
@@ -244,6 +300,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/highest.apex.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -269,6 +332,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/container.zip.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -288,6 +358,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/application.meta_lic",
+				"testdata/reciprocal/bin/bin3.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -309,6 +383,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -323,6 +400,7 @@
 			},
 			expectedDeps: []string{
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -353,6 +431,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/highest.apex.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -383,6 +468,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/container.zip.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -402,6 +494,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/application.meta_lic",
+				"testdata/restricted/bin/bin3.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -426,6 +522,9 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -438,7 +537,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/restricted/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "proprietary",
@@ -468,6 +570,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/highest.apex.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -499,6 +608,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/container.zip.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -519,6 +635,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/application.meta_lic",
+				"testdata/proprietary/bin/bin3.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -540,6 +660,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -552,7 +675,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/proprietary/lib/libd.so.meta_lic",
+			},
 		},
 	}
 	for _, tt := range tests {
diff --git a/tools/compliance/cmd/xmlnotice/xmlnotice.go b/tools/compliance/cmd/xmlnotice/xmlnotice.go
index 84859d7..c3f8e4c 100644
--- a/tools/compliance/cmd/xmlnotice/xmlnotice.go
+++ b/tools/compliance/cmd/xmlnotice/xmlnotice.go
@@ -24,20 +24,16 @@
 	"io/fs"
 	"os"
 	"path/filepath"
+	"sort"
 	"strings"
 
+	"android/soong/response"
 	"android/soong/tools/compliance"
 
 	"github.com/google/blueprint/deptools"
 )
 
 var (
-	outputFile  = flag.String("o", "-", "Where to write the NOTICE xml or xml.gz file. (default stdout)")
-	depsFile    = flag.String("d", "", "Where to write the deps file")
-	product     = flag.String("product", "", "The name of the product for which the notice is generated.")
-	stripPrefix = newMultiString("strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
-	title       = flag.String("title", "", "The title of the notice file.")
-
 	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
@@ -68,23 +64,10 @@
 	return installPath
 }
 
-func init() {
-	flag.Usage = func() {
-		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
-
-Outputs an xml NOTICE.xml or gzipped NOTICE.xml.gz file if the -o filename ends
-with ".gz".
-
-Options:
-`, filepath.Base(os.Args[0]))
-		flag.PrintDefaults()
-	}
-}
-
 // newMultiString creates a flag that allows multiple values in an array.
-func newMultiString(name, usage string) *multiString {
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
 	var f multiString
-	flag.Var(&f, name, usage)
+	flags.Var(&f, name, usage)
 	return &f
 }
 
@@ -95,16 +78,56 @@
 func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
 
 func main() {
-	flag.Parse()
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs an xml NOTICE.xml or gzipped NOTICE.xml.gz file if the -o filename ends
+with ".gz".
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the NOTICE xml or xml.gz file. (default stdout)")
+	depsFile := flags.String("d", "", "Where to write the deps file")
+	product := flags.String("product", "", "The name of the product for which the notice is generated.")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+	title := flags.String("title", "", "The title of the notice file.")
+
+	flags.Parse(expandedArgs)
 
 	// Must specify at least one root target.
-	if flag.NArg() == 0 {
-		flag.Usage()
+	if flags.NArg() == 0 {
+		flags.Usage()
 		os.Exit(2)
 	}
 
 	if len(*outputFile) == 0 {
-		flag.Usage()
+		flags.Usage()
 		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
 		os.Exit(2)
 	} else {
@@ -141,10 +164,10 @@
 
 	ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, *title, &deps}
 
-	err := xmlNotice(ctx, flag.Args()...)
+	err := xmlNotice(ctx, flags.Args()...)
 	if err != nil {
 		if err == failNoneRequested {
-			flag.Usage()
+			flags.Usage()
 		}
 		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
 		os.Exit(1)
@@ -216,7 +239,8 @@
 	}
 	fmt.Fprintln(ctx.stdout, "</licenses>")
 
-	*ctx.deps = ni.InputNoticeFiles()
+	*ctx.deps = ni.InputFiles()
+	sort.Strings(*ctx.deps)
 
 	return nil
 }
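
The change above teaches xmlnotice to accept `@file` response-file arguments and to parse its flags on a private `flag.FlagSet`, so expansion can run before flag parsing. As a hedged sketch, the expansion loop factors into a helper like the following (the `expandArgs` name is hypothetical; `response.ReadRspFile` is called with the same shape as in the diff):

```go
package main

import (
	"fmt"
	"os"
	"strings"

	"android/soong/response"
)

// expandArgs is a hypothetical helper condensing the loop above: every
// "@file" argument is replaced by the arguments read from that response
// file, and all other arguments pass through unchanged.
func expandArgs(args []string) ([]string, error) {
	var expanded []string
	for _, arg := range args {
		if !strings.HasPrefix(arg, "@") {
			expanded = append(expanded, arg)
			continue
		}
		f, err := os.Open(strings.TrimPrefix(arg, "@"))
		if err != nil {
			return nil, err
		}
		respArgs, err := response.ReadRspFile(f)
		f.Close()
		if err != nil {
			return nil, err
		}
		expanded = append(expanded, respArgs...)
	}
	return expanded, nil
}

func main() {
	args, err := expandArgs(os.Args[1:])
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
	fmt.Println(args)
}
```
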
diff --git a/tools/compliance/cmd/xmlnotice/xmlnotice_test.go b/tools/compliance/cmd/xmlnotice/xmlnotice_test.go
index 731e783..551006f 100644
--- a/tools/compliance/cmd/xmlnotice/xmlnotice_test.go
+++ b/tools/compliance/cmd/xmlnotice/xmlnotice_test.go
@@ -65,7 +65,16 @@
 				target{"highest.apex/lib/libb.so", "Android"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -79,7 +88,16 @@
 				target{"container.zip/libb.so", "Android"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/container.zip.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -89,7 +107,13 @@
 				target{"application", "Android"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/application.meta_lic",
+				"testdata/firstparty/bin/bin3.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -99,7 +123,12 @@
 				target{"bin/bin1", "Android"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -109,7 +138,10 @@
 				target{"lib/libd.so", "Android"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "notice",
@@ -129,6 +161,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/highest.apex.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -149,6 +188,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/container.zip.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -164,6 +210,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/application.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -180,6 +230,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -190,7 +243,10 @@
 				target{"lib/libd.so", "External"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "reciprocal",
@@ -210,6 +266,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/highest.apex.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -230,6 +293,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/container.zip.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -245,6 +315,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/application.meta_lic",
+				"testdata/reciprocal/bin/bin3.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -261,6 +335,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -271,7 +348,10 @@
 				target{"lib/libd.so", "External"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "restricted",
@@ -294,6 +374,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/highest.apex.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -317,6 +404,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/container.zip.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -332,6 +426,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/application.meta_lic",
+				"testdata/restricted/bin/bin3.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -350,6 +448,9 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -360,7 +461,10 @@
 				target{"lib/libd.so", "External"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/restricted/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "proprietary",
@@ -382,6 +486,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/highest.apex.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -405,6 +516,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/container.zip.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -421,6 +539,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/application.meta_lic",
+				"testdata/proprietary/bin/bin3.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -437,6 +559,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -447,7 +572,10 @@
 				target{"lib/libd.so", "External"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/proprietary/lib/libd.so.meta_lic",
+			},
 		},
 	}
 	for _, tt := range tests {
diff --git a/tools/compliance/condition.go b/tools/compliance/condition.go
index cfe6f82..3145249 100644
--- a/tools/compliance/condition.go
+++ b/tools/compliance/condition.go
@@ -23,7 +23,7 @@
 type LicenseCondition uint16
 
 // LicenseConditionMask is a bitmask for the recognized license conditions.
-const LicenseConditionMask = LicenseCondition(0x3ff)
+const LicenseConditionMask = LicenseCondition(0x1ff)
 
 const (
 	// UnencumberedCondition identifies public domain or public domain-
@@ -41,21 +41,18 @@
 	// RestrictedCondition identifies a license with requirement to share
 	// all source code linked to the module's source.
 	RestrictedCondition = LicenseCondition(0x0010)
-	// RestrictedClasspathExceptionCondition identifies RestrictedCondition
-	// waived for dynamic linking from independent modules.
-	RestrictedClasspathExceptionCondition = LicenseCondition(0x0020)
 	// WeaklyRestrictedCondition identifies a RestrictedCondition waived
 	// for dynamic linking.
-	WeaklyRestrictedCondition = LicenseCondition(0x0040)
+	WeaklyRestrictedCondition = LicenseCondition(0x0020)
 	// ProprietaryCondition identifies a license with source privacy
 	// requirements.
-	ProprietaryCondition = LicenseCondition(0x0080)
+	ProprietaryCondition = LicenseCondition(0x0040)
 	// ByExceptionOnly identifies a license where policy requires product
 	// counsel review prior to use.
-	ByExceptionOnlyCondition = LicenseCondition(0x0100)
+	ByExceptionOnlyCondition = LicenseCondition(0x0080)
 	// NotAllowedCondition identifies a license with onerous conditions
 	// where policy prohibits use.
-	NotAllowedCondition = LicenseCondition(0x0200)
+	NotAllowedCondition = LicenseCondition(0x0100)
 )
 
 var (
@@ -66,7 +63,6 @@
 		"notice":                              NoticeCondition,
 		"reciprocal":                          ReciprocalCondition,
 		"restricted":                          RestrictedCondition,
-		"restricted_with_classpath_exception": RestrictedClasspathExceptionCondition,
 		"restricted_allows_dynamic_linking":   WeaklyRestrictedCondition,
 		"proprietary":                         ProprietaryCondition,
 		"by_exception_only":                   ByExceptionOnlyCondition,
@@ -87,8 +83,6 @@
 		return "reciprocal"
 	case RestrictedCondition:
 		return "restricted"
-	case RestrictedClasspathExceptionCondition:
-		return "restricted_with_classpath_exception"
 	case WeaklyRestrictedCondition:
 		return "restricted_allows_dynamic_linking"
 	case ProprietaryCondition:
@@ -98,5 +92,5 @@
 	case NotAllowedCondition:
 		return "not_allowed"
 	}
-	panic(fmt.Errorf("unrecognized license condition: %04x", lc))
+	panic(fmt.Errorf("unrecognized license condition: %#v", lc))
 }
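
Removing RestrictedClasspathExceptionCondition renumbers every higher bit, which is why LicenseConditionMask shrinks from 0x3ff to 0x1ff. A quick sanity check, illustrative only:

```go
package main

import "fmt"

func main() {
	// With RestrictedClasspathExceptionCondition gone, the nine remaining
	// conditions occupy bits 0x0001 through 0x0100; OR-ing them together
	// yields 0x1ff, matching the new LicenseConditionMask.
	conditions := []uint16{0x0001, 0x0002, 0x0004, 0x0008, 0x0010, 0x0020, 0x0040, 0x0080, 0x0100}
	var mask uint16
	for _, c := range conditions {
		mask |= c
	}
	fmt.Printf("%#x\n", mask) // prints 0x1ff
}
```
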
diff --git a/tools/compliance/condition_test.go b/tools/compliance/condition_test.go
index 778ce4a..16ec72c 100644
--- a/tools/compliance/condition_test.go
+++ b/tools/compliance/condition_test.go
@@ -21,22 +21,22 @@
 func TestConditionSetHas(t *testing.T) {
 	impliesShare := ImpliesShared
 
-	t.Logf("testing with imliesShare=%04x", impliesShare)
+	t.Logf("testing with imliesShare=%#v", impliesShare)
 
 	if impliesShare.HasAny(NoticeCondition) {
-		t.Errorf("impliesShare.HasAny(\"notice\"=%04x) got true, want false", NoticeCondition)
+		t.Errorf("impliesShare.HasAny(\"notice\"=%#v) got true, want false", NoticeCondition)
 	}
 
 	if !impliesShare.HasAny(RestrictedCondition) {
-		t.Errorf("impliesShare.HasAny(\"restricted\"=%04x) got false, want true", RestrictedCondition)
+		t.Errorf("impliesShare.HasAny(\"restricted\"=%#v) got false, want true", RestrictedCondition)
 	}
 
 	if !impliesShare.HasAny(ReciprocalCondition) {
-		t.Errorf("impliesShare.HasAny(\"reciprocal\"=%04x) got false, want true", ReciprocalCondition)
+		t.Errorf("impliesShare.HasAny(\"reciprocal\"=%#v) got false, want true", ReciprocalCondition)
 	}
 
 	if impliesShare.HasAny(LicenseCondition(0x0000)) {
-		t.Errorf("impliesShare.HasAny(nil=%04x) got true, want false", LicenseCondition(0x0000))
+		t.Errorf("impliesShare.HasAny(nil=%#v) got true, want false", LicenseCondition(0x0000))
 	}
 }
 
@@ -44,7 +44,7 @@
 	for expected, condition := range RecognizedConditionNames {
 		actual := condition.Name()
 		if expected != actual {
-			t.Errorf("unexpected name for condition %04x: got %s, want %s", condition, actual, expected)
+			t.Errorf("unexpected name for condition %#v: got %s, want %s", condition, actual, expected)
 		}
 	}
 }
@@ -62,6 +62,6 @@
 		t.Errorf("invalid condition unexpected name: got %s, wanted panic", name)
 	}()
 	if !panicked {
-		t.Errorf("no expected panic for %04x.Name(): got no panic, wanted panic", lc)
+		t.Errorf("no expected panic for %#v.Name(): got no panic, wanted panic", lc)
 	}
 }
diff --git a/tools/compliance/conditionset_test.go b/tools/compliance/conditionset_test.go
index c91912f..020cc0c 100644
--- a/tools/compliance/conditionset_test.go
+++ b/tools/compliance/conditionset_test.go
@@ -96,14 +96,13 @@
 		{
 			name:       "everything",
 			conditions: []string{"unencumbered", "permissive", "notice", "reciprocal", "restricted", "proprietary"},
-			plus:       &[]string{"restricted_with_classpath_exception", "restricted_allows_dynamic_linking", "by_exception_only", "not_allowed"},
+			plus:       &[]string{"restricted_allows_dynamic_linking", "by_exception_only", "not_allowed"},
 			matchingAny: map[string][]string{
 				"unencumbered":                        []string{"unencumbered"},
 				"permissive":                          []string{"permissive"},
 				"notice":                              []string{"notice"},
 				"reciprocal":                          []string{"reciprocal"},
 				"restricted":                          []string{"restricted"},
-				"restricted_with_classpath_exception": []string{"restricted_with_classpath_exception"},
 				"restricted_allows_dynamic_linking":   []string{"restricted_allows_dynamic_linking"},
 				"proprietary":                         []string{"proprietary"},
 				"by_exception_only":                   []string{"by_exception_only"},
@@ -116,7 +115,6 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_with_classpath_exception",
 				"restricted_allows_dynamic_linking",
 				"proprietary",
 				"by_exception_only",
@@ -131,7 +129,6 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_with_classpath_exception",
 				"restricted_allows_dynamic_linking",
 				"proprietary",
 				"by_exception_only",
@@ -151,7 +148,6 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_with_classpath_exception",
 				"restricted_allows_dynamic_linking",
 				"proprietary",
 				"by_exception_only",
@@ -168,7 +164,6 @@
 				"notice":                              []string{"notice"},
 				"reciprocal":                          []string{"reciprocal"},
 				"restricted":                          []string{"restricted"},
-				"restricted_with_classpath_exception": []string{},
 				"restricted_allows_dynamic_linking":   []string{"restricted_allows_dynamic_linking"},
 				"proprietary":                         []string{"proprietary"},
 				"by_exception_only":                   []string{"by_exception_only"},
@@ -195,7 +190,6 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_with_classpath_exception",
 				"restricted_allows_dynamic_linking",
 				"proprietary",
 				"by_exception_only",
@@ -208,7 +202,6 @@
 				"notice":                              []string{"notice"},
 				"reciprocal":                          []string{"reciprocal"},
 				"restricted":                          []string{"restricted"},
-				"restricted_with_classpath_exception": []string{"restricted_with_classpath_exception"},
 				"restricted_allows_dynamic_linking":   []string{},
 				"proprietary":                         []string{"proprietary"},
 				"by_exception_only":                   []string{"by_exception_only"},
@@ -221,7 +214,6 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_with_classpath_exception",
 				"proprietary",
 				"by_exception_only",
 				"not_allowed",
@@ -235,7 +227,6 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_with_classpath_exception",
 				"restricted_allows_dynamic_linking",
 				"proprietary",
 				"by_exception_only",
@@ -247,7 +238,6 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_with_classpath_exception",
 				"restricted_allows_dynamic_linking",
 				"proprietary",
 				"by_exception_only",
@@ -259,7 +249,6 @@
 				"notice":                              []string{},
 				"reciprocal":                          []string{},
 				"restricted":                          []string{},
-				"restricted_with_classpath_exception": []string{},
 				"restricted_allows_dynamic_linking":   []string{},
 				"proprietary":                         []string{},
 				"by_exception_only":                   []string{},
@@ -270,21 +259,20 @@
 		},
 		{
 			name:       "restrictedplus",
-			conditions: []string{"restricted", "restricted_with_classpath_exception", "restricted_allows_dynamic_linking"},
+			conditions: []string{"restricted", "restricted_allows_dynamic_linking"},
 			plus:       &[]string{"permissive", "notice", "restricted", "proprietary"},
 			matchingAny: map[string][]string{
 				"unencumbered":                        []string{},
 				"permissive":                          []string{"permissive"},
 				"notice":                              []string{"notice"},
 				"restricted":                          []string{"restricted"},
-				"restricted_with_classpath_exception": []string{"restricted_with_classpath_exception"},
 				"restricted_allows_dynamic_linking":   []string{"restricted_allows_dynamic_linking"},
 				"proprietary":                         []string{"proprietary"},
 				"restricted|proprietary":              []string{"restricted", "proprietary"},
 				"by_exception_only":                   []string{},
 				"proprietary|by_exception_only":       []string{"proprietary"},
 			},
-			expected: []string{"permissive", "notice", "restricted", "restricted_with_classpath_exception", "restricted_allows_dynamic_linking", "proprietary"},
+			expected: []string{"permissive", "notice", "restricted", "restricted_allows_dynamic_linking", "proprietary"},
 		},
 	}
 	for _, tt := range tests {
@@ -342,11 +330,11 @@
 				actual := cs.MatchingAny(toConditions(strings.Split(data, "|"))...)
 				actualNames := actual.Names()
 
-				t.Logf("MatchingAny(%s): actual set %04x %s", data, actual, actual.String())
-				t.Logf("MatchingAny(%s): expected set %04x %s", data, expected, expected.String())
+				t.Logf("MatchingAny(%s): actual set %#v %s", data, actual, actual.String())
+				t.Logf("MatchingAny(%s): expected set %#v %s", data, expected, expected.String())
 
 				if actual != expected {
-					t.Errorf("MatchingAny(%s): got %04x, want %04x", data, actual, expected)
+					t.Errorf("MatchingAny(%s): got %#v, want %#v", data, actual, expected)
 					continue
 				}
 				if len(actualNames) != len(expectedNames) {
@@ -382,11 +370,11 @@
 				actual := cs.MatchingAnySet(NewLicenseConditionSet(toConditions(strings.Split(data, "|"))...))
 				actualNames := actual.Names()
 
-				t.Logf("MatchingAnySet(%s): actual set %04x %s", data, actual, actual.String())
-				t.Logf("MatchingAnySet(%s): expected set %04x %s", data, expected, expected.String())
+				t.Logf("MatchingAnySet(%s): actual set %#v %s", data, actual, actual.String())
+				t.Logf("MatchingAnySet(%s): expected set %#v %s", data, expected, expected.String())
 
 				if actual != expected {
-					t.Errorf("MatchingAnySet(%s): got %04x, want %04x", data, actual, expected)
+					t.Errorf("MatchingAnySet(%s): got %#v, want %#v", data, actual, expected)
 					continue
 				}
 				if len(actualNames) != len(expectedNames) {
@@ -426,11 +414,11 @@
 
 			actualNames := actual.Names()
 
-			t.Logf("actual license condition set: %04x %s", actual, actual.String())
-			t.Logf("expected license condition set: %04x %s", expected, expected.String())
+			t.Logf("actual license condition set: %#v %s", actual, actual.String())
+			t.Logf("expected license condition set: %#v %s", expected, expected.String())
 
 			if actual != expected {
-				t.Errorf("checkExpected: got %04x, want %04x", actual, expected)
+				t.Errorf("checkExpected: got %#v, want %#v", actual, expected)
 				return false
 			}
 
@@ -487,7 +475,7 @@
 
 			notExpected := (AllLicenseConditions &^ expected)
 			notExpectedList := notExpected.AsList()
-			t.Logf("not expected license condition set: %04x %s", notExpected, notExpected.String())
+			t.Logf("not expected license condition set: %#v %s", notExpected, notExpected.String())
 
 			if len(tt.expected) == 0 {
 				if actual.HasAny(append(expectedConditions, notExpectedList...)...) {
@@ -526,11 +514,11 @@
 
 			actualNames := actual.Names()
 
-			t.Logf("actual license condition set: %04x %s", actual, actual.String())
-			t.Logf("expected license condition set: %04x %s", expected, expected.String())
+			t.Logf("actual license condition set: %#v %s", actual, actual.String())
+			t.Logf("expected license condition set: %#v %s", expected, expected.String())
 
 			if actual != expected {
-				t.Errorf("checkExpectedSet: got %04x, want %04x", actual, expected)
+				t.Errorf("checkExpectedSet: got %#v, want %#v", actual, expected)
 				return false
 			}
 
@@ -581,7 +569,7 @@
 			}
 
 			notExpected := (AllLicenseConditions &^ expected)
-			t.Logf("not expected license condition set: %04x %s", notExpected, notExpected.String())
+			t.Logf("not expected license condition set: %#v %s", notExpected, notExpected.String())
 
 			if len(tt.expected) == 0 {
 				if actual.MatchesAnySet(expected, notExpected) {
@@ -606,10 +594,10 @@
 				t.Errorf("actual.Difference({expected}).IsEmpty(): want true, got false")
 			}
 			if expected != actual.Intersection(expected) {
-				t.Errorf("expected == actual.Intersection({expected}): want true, got false (%04x != %04x)", expected, actual.Intersection(expected))
+				t.Errorf("expected == actual.Intersection({expected}): want true, got false (%#v != %#v)", expected, actual.Intersection(expected))
 			}
 			if actual != actual.Intersection(expected) {
-				t.Errorf("actual == actual.Intersection({expected}): want true, got false (%04x != %04x)", actual, actual.Intersection(expected))
+				t.Errorf("actual == actual.Intersection({expected}): want true, got false (%#v != %#v)", actual, actual.Intersection(expected))
 			}
 			return true
 		}
diff --git a/tools/compliance/doc.go b/tools/compliance/doc.go
index a47c1cf..5ced9ee 100644
--- a/tools/compliance/doc.go
+++ b/tools/compliance/doc.go
@@ -11,6 +11,10 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
+
+// Much of this content also appears in README.md.
+// When changing this file, consider whether the change also applies to README.md.
+
 /*
 
 Package compliance provides an approved means for reading, consuming, and
@@ -31,6 +35,13 @@
 artifacts in a release or distribution. While conceptually immutable, parts of
 the graph may be loaded or evaluated lazily.
 
+Conceptually, the graph itself will always be a directed acyclic graph. One
+representation is a set of directed edges. Another is a set of nodes with
+directed edges to their dependencies.
+
+The edges have annotations, which can distinguish between build tools, runtime
+dependencies, and dependencies like 'contains' that make a derivative work.
+
 LicenseCondition
 ----------------
 
@@ -51,17 +62,13 @@
 
 `ActsOn` is the target to share, give notice for, hide etc.
 
-`Resolves` is the license condition that the action resolves.
+`Resolves` is the set of condition types that the action resolves.
 
-Remember: Each license condition pairs a condition name with an originating
-target so each resolution in a ResolutionSet has two targets it applies to and
-one target from which it originates, all of which may be the same target.
-
-For most condition types, `ActsOn` and `Resolves.Origin` will be the same
-target. For example, a notice condition policy means attribution or notice must
-be given for the target where the condition originates. Likewise, a proprietary
-condition policy means the privacy of the target where the condition originates
-must be respected. i.e. The thing acted on is the origin.
+For most condition types, `ActsOn` will be the target where the condition
+originated. For example, a notice condition policy means attribution or notice
+must be given for the target where the condition originates. Likewise, a
+proprietary condition policy means the privacy of the target where the
+condition originates must be respected; i.e., the thing acted on is the origin.
 
 Restricted conditions are different. The infectious nature of restricted often
 means sharing code that is not the target where the restricted condition
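
The paragraphs added to doc.go describe the license graph as a DAG whose annotated edges distinguish build tools, runtime dependencies, and derivative-work relationships. A minimal sketch of that shape, using hypothetical node/edge types rather than the package's actual TargetNode/TargetEdge:

```go
package main

import "fmt"

// node and edge are hypothetical stand-ins for TargetNode and TargetEdge,
// showing the second representation the doc describes: nodes holding
// annotated edges to their dependencies.
type node struct {
	name string
	deps []edge
}

type edge struct {
	dependency  *node
	annotations []string // e.g. "static", "dynamic", "toolchain"
}

func main() {
	liba := &node{name: "liba.so"}
	bin := &node{name: "bin", deps: []edge{
		{dependency: liba, annotations: []string{"dynamic"}},
	}}
	for _, e := range bin.deps {
		fmt.Printf("%s -[%v]> %s\n", bin.name, e.annotations, e.dependency.name)
	}
}
```
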
diff --git a/tools/compliance/go.mod b/tools/compliance/go.mod
index 61e2158..088915a 100644
--- a/tools/compliance/go.mod
+++ b/tools/compliance/go.mod
@@ -4,9 +4,17 @@
 
 replace google.golang.org/protobuf v0.0.0 => ../../../../external/golang-protobuf
 
-require android/soong v0.0.0
+require (
+	android/soong v0.0.0
+	github.com/google/blueprint v0.0.0
+)
 
-replace android/soong v0.0.0 => ../../../soong									      
+require golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
+
+replace android/soong v0.0.0 => ../../../soong
+
+replace github.com/google/blueprint => ../../../blueprint
+
 // Indirect deps from golang-protobuf
 exclude github.com/golang/protobuf v1.5.0
 
diff --git a/tools/compliance/go.sum b/tools/compliance/go.sum
new file mode 100644
index 0000000..cbe76d9
--- /dev/null
+++ b/tools/compliance/go.sum
@@ -0,0 +1,2 @@
+golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f h1:uF6paiQQebLeSXkrTqHqz0MXhXXS1KgF41eUdBNvxK0=
+golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
diff --git a/tools/compliance/graph.go b/tools/compliance/graph.go
index e73ab46..80a2f47 100644
--- a/tools/compliance/graph.go
+++ b/tools/compliance/graph.go
@@ -58,13 +58,11 @@
 	/// (guarded by mu)
 	targets map[string]*TargetNode
 
-	// wgBU becomes non-nil when the bottom-up resolve begins and reaches 0
-	// (i.e. Wait() proceeds) when the bottom-up resolve completes. (guarded by mu)
-	wgBU *sync.WaitGroup
+	// onceBottomUp ensures the bottom-up resolve walk happens only once.
+	onceBottomUp sync.Once
 
-	// wgTD becomes non-nil when the top-down resolve begins and reaches 0 (i.e. Wait()
-	// proceeds) when the top-down resolve completes. (guarded by mu)
-	wgTD *sync.WaitGroup
+	// onceTopDown ensures the top-down resolve walk happens only once.
+	onceTopDown sync.Once
 
 	// shippedNodes caches the results of a full walk of nodes identifying targets
 	// distributed either directly or as derivative works. (creation guarded by mu)
@@ -139,6 +137,24 @@
 	return e.annotations
 }
 
+// IsRuntimeDependency returns true for edges representing shared libraries
+// linked dynamically at runtime.
+func (e *TargetEdge) IsRuntimeDependency() bool {
+	return edgeIsDynamicLink(e)
+}
+
+// IsDerivation returns true for edges where the target is a derivative
+// work of dependency.
+func (e *TargetEdge) IsDerivation() bool {
+	return edgeIsDerivation(e)
+}
+
+// IsBuildTool returns true for edges where the target is built
+// by the dependency.
+func (e *TargetEdge) IsBuildTool() bool {
+	return !edgeIsDerivation(e) && !edgeIsDynamicLink(e)
+}
+
 // String returns a human-readable string representation of the edge.
 func (e *TargetEdge) String() string {
 	return fmt.Sprintf("%s -[%s]> %s", e.target.name, strings.Join(e.annotations.AsList(), ", "), e.dependency.name)
@@ -188,6 +204,11 @@
 	return s.edge.dependency
 }
 
+// Edge returns the edge of the path segment.
+func (s TargetEdgePathSegment) Edge() *TargetEdge {
+	return s.edge
+}
+
 // Annotations describes the type of edge by the set of annotations attached to
 // it.
 //
@@ -300,21 +321,9 @@
 	return tn.proto.GetPackageName()
 }
 
-// ModuleTypes returns the list of module types implementing the target.
-// (unordered)
-//
-// In an ideal world, only 1 module type would implement each target, but the
-// interactions between Soong and Make for host versus product and for a
-// variety of architectures sometimes causes multiple module types per target
-// (often a regular build target and a prebuilt.)
-func (tn *TargetNode) ModuleTypes() []string {
-	return append([]string{}, tn.proto.ModuleTypes...)
-}
-
-// ModuleClasses returns the list of module classes implementing the target.
-// (unordered)
-func (tn *TargetNode) ModuleClasses() []string {
-	return append([]string{}, tn.proto.ModuleClasses...)
+// ModuleName returns the module name of the target.
+func (tn *TargetNode) ModuleName() string {
+	return tn.proto.GetModuleName()
 }
 
 // Projects returns the projects defining the target node. (unordered)
@@ -326,14 +335,6 @@
 	return append([]string{}, tn.proto.Projects...)
 }
 
-// LicenseKinds returns the list of license kind names for the module or
-// target. (unordered)
-//
-// e.g. SPDX-license-identifier-MIT or legacy_proprietary
-func (tn *TargetNode) LicenseKinds() []string {
-	return append([]string{}, tn.proto.LicenseKinds...)
-}
-
 // LicenseConditions returns a copy of the set of license conditions
 // originating at the target. The values that appear and how each is resolved
 // is a matter of policy. (unordered)
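
The three predicates added to TargetEdge partition edges by their annotations. Assuming, as the policy_policy.go code further down suggests, that edgeIsDynamicLink checks the "dynamic" annotation and edgeIsDerivation is !dynamic && !toolchain, the classification looks like this hedged sketch:

```go
package main

import "fmt"

// classify mirrors the three new TargetEdge predicates under the stated
// assumptions; it is illustrative, not the package's actual logic.
func classify(annotations map[string]bool) string {
	isDynamic := annotations["dynamic"]
	isToolchain := annotations["toolchain"]
	switch {
	case isDynamic:
		return "runtime dependency" // IsRuntimeDependency
	case !isToolchain:
		return "derivation" // IsDerivation: contained or statically linked
	default:
		return "build tool" // IsBuildTool
	}
}

func main() {
	fmt.Println(classify(map[string]bool{"dynamic": true}))   // runtime dependency
	fmt.Println(classify(map[string]bool{"static": true}))    // derivation
	fmt.Println(classify(map[string]bool{"toolchain": true})) // build tool
}
```
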
diff --git a/tools/compliance/noticeindex.go b/tools/compliance/noticeindex.go
index f082383..dbfede6 100644
--- a/tools/compliance/noticeindex.go
+++ b/tools/compliance/noticeindex.go
@@ -15,7 +15,6 @@
 package compliance
 
 import (
-	"bufio"
 	"crypto/md5"
 	"fmt"
 	"io"
@@ -25,16 +24,11 @@
 	"regexp"
 	"sort"
 	"strings"
-)
 
-const (
-	noProjectName = "\u2205"
+	"android/soong/tools/compliance/projectmetadata"
 )
 
 var (
-	nameRegexp         = regexp.MustCompile(`^\s*name\s*:\s*"(.*)"\s*$`)
-	descRegexp         = regexp.MustCompile(`^\s*description\s*:\s*"(.*)"\s*$`)
-	versionRegexp      = regexp.MustCompile(`^\s*version\s*:\s*"(.*)"\s*$`)
 	licensesPathRegexp = regexp.MustCompile(`licen[cs]es?/`)
 )
 
@@ -43,6 +37,8 @@
 type NoticeIndex struct {
 	// lg identifies the license graph to which the index applies.
 	lg *LicenseGraph
+	// pmix indexes project metadata
+	pmix *projectmetadata.Index
 	// rs identifies the set of resolutions upon which the index is based.
 	rs ResolutionSet
 	// shipped identifies the set of target nodes shipped directly or as derivative works.
@@ -75,6 +71,7 @@
 	}
 	ni := &NoticeIndex{
 		lg:             lg,
+		pmix:           projectmetadata.NewIndex(rootFS),
 		rs:             rs,
 		shipped:        ShippedNodes(lg),
 		rootFS:         rootFS,
@@ -110,9 +107,12 @@
 		return hashes, nil
 	}
 
-	link := func(tn *TargetNode, hashes map[hash]struct{}, installPaths []string) {
+	link := func(tn *TargetNode, hashes map[hash]struct{}, installPaths []string) error {
 		for h := range hashes {
-			libName := ni.getLibName(tn, h)
+			libName, err := ni.getLibName(tn, h)
+			if err != nil {
+				return err
+			}
 			if _, ok := ni.libHash[libName]; !ok {
 				ni.libHash[libName] = make(map[hash]struct{})
 			}
@@ -145,6 +145,11 @@
 				}
 			}
 		}
+		return nil
+	}
+
+	cacheMetadata := func(tn *TargetNode) {
+		ni.pmix.MetadataForProjects(tn.Projects()...)
 	}
 
 	// returns error from walk below.
@@ -157,13 +162,17 @@
 		if !ni.shipped.Contains(tn) {
 			return false
 		}
+		go cacheMetadata(tn)
 		installPaths := getInstallPaths(tn, path)
 		var hashes map[hash]struct{}
 		hashes, err = index(tn)
 		if err != nil {
 			return false
 		}
-		link(tn, hashes, installPaths)
+		err = link(tn, hashes, installPaths)
+		if err != nil {
+			return false
+		}
 		if tn.IsContainer() {
 			return true
 		}
@@ -173,7 +182,10 @@
 			if err != nil {
 				return false
 			}
-			link(r.actsOn, hashes, installPaths)
+			err = link(r.actsOn, hashes, installPaths)
+			if err != nil {
+				return false
+			}
 		}
 		return false
 	})
@@ -214,12 +226,18 @@
 		close(c)
 	}()
 	return c
+
 }
 
-// InputNoticeFiles returns the list of files that were hashed during IndexLicenseTexts.
-func (ni *NoticeIndex) InputNoticeFiles() []string {
-	files := append([]string(nil), ni.files...)
-	sort.Strings(files)
+// InputFiles returns the complete list of files read during indexing.
+func (ni *NoticeIndex) InputFiles() []string {
+	projectMeta := ni.pmix.AllMetadataFiles()
+	files := make([]string, 0, len(ni.files) + len(ni.lg.targets) + len(projectMeta))
+	files = append(files, ni.files...)
+	for f := range ni.lg.targets {
+		files = append(files, f)
+	}
+	files = append(files, projectMeta...)
 	return files
 }
 
@@ -308,15 +326,18 @@
 }
 
 // getLibName returns the name of the library associated with `noticeFor`.
-func (ni *NoticeIndex) getLibName(noticeFor *TargetNode, h hash) string {
+func (ni *NoticeIndex) getLibName(noticeFor *TargetNode, h hash) (string, error) {
 	for _, text := range noticeFor.LicenseTexts() {
 		if !strings.Contains(text, ":") {
 			if ni.hash[text].key != h.key {
 				continue
 			}
-			ln := ni.checkMetadataForLicenseText(noticeFor, text)
+			ln, err := ni.checkMetadataForLicenseText(noticeFor, text)
+			if err != nil {
+				return "", err
+			}
 			if len(ln) > 0 {
-				return ln
+				return ln, nil
 			}
 			continue
 		}
@@ -331,17 +352,20 @@
 		if err != nil {
 			continue
 		}
-		return ln
+		return ln, nil
 	}
 	// use name from METADATA if available
-	ln := ni.checkMetadata(noticeFor)
+	ln, err := ni.checkMetadata(noticeFor)
+	if err != nil {
+		return "", err
+	}
 	if len(ln) > 0 {
-		return ln
+		return ln, nil
 	}
 	// use package_name: from license{} module if available
 	pn := noticeFor.PackageName()
 	if len(pn) > 0 {
-		return pn
+		return pn, nil
 	}
 	for _, p := range noticeFor.Projects() {
 		if strings.HasPrefix(p, "prebuilts/") {
@@ -360,18 +384,17 @@
 						continue
 					}
 				}
-				for r, prefix := range SafePrebuiltPrefixes {
-					match := r.FindString(licenseText)
+				for _, safePrebuiltPrefix := range safePrebuiltPrefixes {
+					match := safePrebuiltPrefix.re.FindString(licenseText)
 					if len(match) == 0 {
 						continue
 					}
-					strip := SafePathPrefixes[prefix]
-					if strip {
+					if safePrebuiltPrefix.strip {
 						// strip entire prefix
 						match = licenseText[len(match):]
 					} else {
 						// strip from prebuilts/ until safe prefix
-						match = licenseText[len(match)-len(prefix):]
+						match = licenseText[len(match)-len(safePrebuiltPrefix.prefix):]
 					}
 					// remove LICENSE or NOTICE or other filename
 					li := strings.LastIndex(match, "/")
@@ -386,17 +409,17 @@
 							match = match[:li]
 						}
 					}
-					return match
+					return match, nil
 				}
 				break
 			}
 		}
-		for prefix, strip := range SafePathPrefixes {
-			if strings.HasPrefix(p, prefix) {
-				if strip {
-					return p[len(prefix):]
+		for _, safePathPrefix := range safePathPrefixes {
+			if strings.HasPrefix(p, safePathPrefix.prefix) {
+				if safePathPrefix.strip {
+					return p[len(safePathPrefix.prefix):], nil
 				} else {
-					return p
+					return p, nil
 				}
 			}
 		}
@@ -411,35 +434,26 @@
 	if fi > 0 {
 		n = n[:fi]
 	}
-	return n
+	return n, nil
 }
 
 // checkMetadata tries to look up a library name from a METADATA file associated with `noticeFor`.
-func (ni *NoticeIndex) checkMetadata(noticeFor *TargetNode) string {
-	for _, p := range noticeFor.Projects() {
-		if name, ok := ni.projectName[p]; ok {
-			if name == noProjectName {
-				continue
-			}
-			return name
-		}
-		name, err := ni.checkMetadataFile(filepath.Join(p, "METADATA"))
-		if err != nil {
-			ni.projectName[p] = noProjectName
-			continue
-		}
-		if len(name) == 0 {
-			ni.projectName[p] = noProjectName
-			continue
-		}
-		ni.projectName[p] = name
-		return name
+func (ni *NoticeIndex) checkMetadata(noticeFor *TargetNode) (string, error) {
+	pms, err := ni.pmix.MetadataForProjects(noticeFor.Projects()...)
+	if err != nil {
+		return "", err
 	}
-	return ""
+	for _, pm := range pms {
+		name := pm.VersionedName()
+		if name != "" {
+			return name, nil
+		}
+	}
+	return "", nil
 }
 
 // checkMetadataForLicenseText
-func (ni *NoticeIndex) checkMetadataForLicenseText(noticeFor *TargetNode, licenseText string) string {
+func (ni *NoticeIndex) checkMetadataForLicenseText(noticeFor *TargetNode, licenseText string) (string, error) {
 	p := ""
 	for _, proj := range noticeFor.Projects() {
 		if strings.HasPrefix(licenseText, proj) {
@@ -457,79 +471,17 @@
 				p = filepath.Dir(p)
 				continue
 			}
-			return ""
+			return "", nil
 		}
 	}
-	if name, ok := ni.projectName[p]; ok {
-		if name == noProjectName {
-			return ""
-		}
-		return name
-	}
-	name, err := ni.checkMetadataFile(filepath.Join(p, "METADATA"))
-	if err == nil && len(name) > 0 {
-		ni.projectName[p] = name
-		return name
-	}
-	ni.projectName[p] = noProjectName
-	return ""
-}
-
-// checkMetadataFile tries to look up a library name from a METADATA file at `path`.
-func (ni *NoticeIndex) checkMetadataFile(path string) (string, error) {
-	f, err := ni.rootFS.Open(path)
+	pms, err := ni.pmix.MetadataForProjects(p)
 	if err != nil {
 		return "", err
 	}
-	name := ""
-	description := ""
-	version := ""
-	s := bufio.NewScanner(f)
-	for s.Scan() {
-		line := s.Text()
-		m := nameRegexp.FindStringSubmatch(line)
-		if m != nil {
-			if 1 < len(m) && m[1] != "" {
-				name = m[1]
-			}
-			if version != "" {
-				break
-			}
-			continue
-		}
-		m = versionRegexp.FindStringSubmatch(line)
-		if m != nil {
-			if 1 < len(m) && m[1] != "" {
-				version = m[1]
-			}
-			if name != "" {
-				break
-			}
-			continue
-		}
-		m = descRegexp.FindStringSubmatch(line)
-		if m != nil {
-			if 1 < len(m) && m[1] != "" {
-				description = m[1]
-			}
-		}
+	if pms == nil {
+		return "", nil
 	}
-	_ = s.Err()
-	_ = f.Close()
-	if name != "" {
-		if version != "" {
-			if version[0] == 'v' || version[0] == 'V' {
-				return name + "_" + version, nil
-			} else {
-				return name + "_v_" + version, nil
-			}
-		}
-		return name, nil
-	}
-	if description != "" {
-		return description, nil
-	}
-	return "", nil
+	return pms[0].VersionedName(), nil
 }
 
 // addText reads and indexes the content of a license text file.
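
The switch from InputNoticeFiles to InputFiles is what grew every expectedDeps list in the tests above: the deps file now records license texts, every target's .meta_lic file, and any project METADATA files read, with sorting moved to the caller (see xmlnotice.go). A standalone sketch of the aggregation, with a hypothetical inputFiles helper:

```go
package main

import "fmt"

// inputFiles is a hypothetical standalone form of NoticeIndex.InputFiles:
// it concatenates license-text files, target .meta_lic files, and project
// metadata files, leaving sorting to the caller.
func inputFiles(noticeFiles, targetMetaLics, projectMeta []string) []string {
	files := make([]string, 0, len(noticeFiles)+len(targetMetaLics)+len(projectMeta))
	files = append(files, noticeFiles...)
	files = append(files, targetMetaLics...)
	files = append(files, projectMeta...)
	return files
}

func main() {
	deps := inputFiles(
		[]string{"testdata/notice/NOTICE_LICENSE"},
		[]string{"testdata/notice/lib/libd.so.meta_lic"},
		nil,
	)
	fmt.Println(deps)
}
```
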
diff --git a/tools/compliance/policy_policy.go b/tools/compliance/policy_policy.go
index 60bdf48..23e25c6 100644
--- a/tools/compliance/policy_policy.go
+++ b/tools/compliance/policy_policy.go
@@ -29,30 +29,31 @@
 		"toolchain": "toolchain",
 	}
 
-	// SafePathPrefixes maps the path prefixes presumed not to contain any
+	// safePathPrefixes maps the path prefixes presumed not to contain any
 	// proprietary or confidential pathnames to whether to strip the prefix
 	// from the path when used as the library name for notices.
-	SafePathPrefixes = map[string]bool{
-		"external/":    true,
-		"art/":         false,
-		"build/":       false,
-		"cts/":         false,
-		"dalvik/":      false,
-		"developers/":  false,
-		"development/": false,
-		"frameworks/":  false,
-		"packages/":    true,
-		"prebuilts/":   false,
-		"sdk/":         false,
-		"system/":      false,
-		"test/":        false,
-		"toolchain/":   false,
-		"tools/":       false,
+	safePathPrefixes = []safePathPrefixesType{
+		{"external/", true},
+		{"art/", false},
+		{"build/", false},
+		{"cts/", false},
+		{"dalvik/", false},
+		{"developers/", false},
+		{"development/", false},
+		{"frameworks/", false},
+		{"packages/", true},
+		{"prebuilts/module_sdk/", true},
+		{"prebuilts/", false},
+		{"sdk/", false},
+		{"system/", false},
+		{"test/", false},
+		{"toolchain/", false},
+		{"tools/", false},
 	}
 
-	// SafePrebuiltPrefixes maps the regular expression to match a prebuilt
+	// safePrebuiltPrefixes maps the regular expression to match a prebuilt
 	// containing the path of a safe prefix to the safe prefix.
-	SafePrebuiltPrefixes = make(map[*regexp.Regexp]string)
+	safePrebuiltPrefixes []safePrebuiltPrefixesType
 
 	// ImpliesUnencumbered lists the condition names representing an author attempt to disclaim copyright.
 	ImpliesUnencumbered = LicenseConditionSet(UnencumberedCondition)
@@ -62,14 +63,13 @@
 
 	// ImpliesNotice lists the condition names implying a notice or attribution policy.
 	ImpliesNotice = LicenseConditionSet(UnencumberedCondition | PermissiveCondition | NoticeCondition | ReciprocalCondition |
-		RestrictedCondition | RestrictedClasspathExceptionCondition | WeaklyRestrictedCondition |
-		ProprietaryCondition | ByExceptionOnlyCondition)
+		RestrictedCondition | WeaklyRestrictedCondition | ProprietaryCondition | ByExceptionOnlyCondition)
 
 	// ImpliesReciprocal lists the condition names implying a local source-sharing policy.
 	ImpliesReciprocal = LicenseConditionSet(ReciprocalCondition)
 
 	// Restricted lists the condition names implying an infectious source-sharing policy.
-	ImpliesRestricted = LicenseConditionSet(RestrictedCondition | RestrictedClasspathExceptionCondition | WeaklyRestrictedCondition)
+	ImpliesRestricted = LicenseConditionSet(RestrictedCondition | WeaklyRestrictedCondition)
 
 	// ImpliesProprietary lists the condition names implying a confidentiality policy.
 	ImpliesProprietary = LicenseConditionSet(ProprietaryCondition)
@@ -81,9 +81,19 @@
 	ImpliesPrivate = LicenseConditionSet(ProprietaryCondition)
 
 	// ImpliesShared lists the condition names implying a source-code sharing policy.
-	ImpliesShared = LicenseConditionSet(ReciprocalCondition | RestrictedCondition | RestrictedClasspathExceptionCondition | WeaklyRestrictedCondition)
+	ImpliesShared = LicenseConditionSet(ReciprocalCondition | RestrictedCondition | WeaklyRestrictedCondition)
 )
 
+type safePathPrefixesType struct {
+	prefix string
+	strip  bool
+}
+
+type safePrebuiltPrefixesType struct {
+	safePathPrefixesType
+	re *regexp.Regexp
+}
+
 var (
 	anyLgpl      = regexp.MustCompile(`^SPDX-license-identifier-LGPL.*`)
 	versionedGpl = regexp.MustCompile(`^SPDX-license-identifier-GPL-\p{N}.*`)
@@ -92,12 +102,13 @@
 )
 
 func init() {
-	for prefix := range SafePathPrefixes {
-		if prefix == "prebuilts/" {
+	for _, safePathPrefix := range safePathPrefixes {
+		if strings.HasPrefix(safePathPrefix.prefix, "prebuilts/") {
 			continue
 		}
-		r := regexp.MustCompile("^prebuilts/[^ ]*/" + prefix)
-		SafePrebuiltPrefixes[r] = prefix
+		r := regexp.MustCompile("^prebuilts/(?:runtime/mainline/)?" + safePathPrefix.prefix)
+		safePrebuiltPrefixes = append(safePrebuiltPrefixes,
+			safePrebuiltPrefixesType{safePathPrefix, r})
 	}
 }
 
@@ -106,36 +117,6 @@
 func LicenseConditionSetFromNames(tn *TargetNode, names ...string) LicenseConditionSet {
 	cs := NewLicenseConditionSet()
 	for _, name := range names {
-		if name == "restricted" {
-			if 0 == len(tn.LicenseKinds()) {
-				cs = cs.Plus(RestrictedCondition)
-				continue
-			}
-			hasLgpl := false
-			hasClasspath := false
-			hasGeneric := false
-			for _, kind := range tn.LicenseKinds() {
-				if strings.HasSuffix(kind, "-with-classpath-exception") {
-					cs = cs.Plus(RestrictedClasspathExceptionCondition)
-					hasClasspath = true
-				} else if anyLgpl.MatchString(kind) {
-					cs = cs.Plus(WeaklyRestrictedCondition)
-					hasLgpl = true
-				} else if versionedGpl.MatchString(kind) {
-					cs = cs.Plus(RestrictedCondition)
-				} else if genericGpl.MatchString(kind) {
-					hasGeneric = true
-				} else if kind == "legacy_restricted" || ccBySa.MatchString(kind) {
-					cs = cs.Plus(RestrictedCondition)
-				} else {
-					cs = cs.Plus(RestrictedCondition)
-				}
-			}
-			if hasGeneric && !hasLgpl && !hasClasspath {
-				cs = cs.Plus(RestrictedCondition)
-			}
-			continue
-		}
 		if lc, ok := RecognizedConditionNames[name]; ok {
 			cs |= LicenseConditionSet(lc)
 		}
@@ -202,9 +183,6 @@
 	}
 
 	result |= depConditions & LicenseConditionSet(RestrictedCondition)
-	if 0 != (depConditions&LicenseConditionSet(RestrictedClasspathExceptionCondition)) && !edgeNodesAreIndependentModules(e) {
-		result |= LicenseConditionSet(RestrictedClasspathExceptionCondition)
-	}
 	return result
 }
 
@@ -241,9 +219,6 @@
 		return result
 	}
 	result = result.Minus(WeaklyRestrictedCondition)
-	if edgeNodesAreIndependentModules(e) {
-		result = result.Minus(RestrictedClasspathExceptionCondition)
-	}
 	return result
 }
 
@@ -261,10 +236,7 @@
 		return NewLicenseConditionSet()
 	}
 
-	result &= LicenseConditionSet(RestrictedCondition | RestrictedClasspathExceptionCondition)
-	if 0 != (result&LicenseConditionSet(RestrictedClasspathExceptionCondition)) && edgeNodesAreIndependentModules(e) {
-		result &= LicenseConditionSet(RestrictedCondition)
-	}
+	result &= LicenseConditionSet(RestrictedCondition)
 	return result
 }
 
@@ -281,9 +253,3 @@
 	isToolchain := e.annotations.HasAnnotation("toolchain")
 	return !isDynamic && !isToolchain
 }
-
-// edgeNodesAreIndependentModules returns true for edges where the target and
-// dependency are independent modules.
-func edgeNodesAreIndependentModules(e *TargetEdge) bool {
-	return e.target.PackageName() != e.dependency.PackageName()
-}
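
Converting SafePathPrefixes from a map to an ordered slice makes prefix lookup first-match-wins, letting the new, more specific prebuilts/module_sdk/ entry take priority over prebuilts/; a map offered no iteration order to rely on. An illustrative sketch with a hypothetical libName helper:

```go
package main

import (
	"fmt"
	"strings"
)

type prefixRule struct {
	prefix string
	strip  bool
}

// rules is ordered: more specific prefixes must come before their parents,
// exactly as in the slice above.
var rules = []prefixRule{
	{"prebuilts/module_sdk/", true},
	{"prebuilts/", false},
}

// libName applies the first matching rule: strip the prefix when strip is
// true, otherwise keep the path as-is.
func libName(path string) string {
	for _, r := range rules {
		if strings.HasPrefix(path, r.prefix) {
			if r.strip {
				return path[len(r.prefix):]
			}
			return path
		}
	}
	return path
}

func main() {
	fmt.Println(libName("prebuilts/module_sdk/art/foo")) // art/foo
	fmt.Println(libName("prebuilts/misc/bar"))           // prebuilts/misc/bar
}
```
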
diff --git a/tools/compliance/policy_policy_test.go b/tools/compliance/policy_policy_test.go
index 27ce16c..6188eb2 100644
--- a/tools/compliance/policy_policy_test.go
+++ b/tools/compliance/policy_policy_test.go
@@ -20,6 +20,8 @@
 	"sort"
 	"strings"
 	"testing"
+
+	"android/soong/tools/compliance/testfs"
 )
 
 func TestPolicy_edgeConditions(t *testing.T) {
@@ -85,19 +87,13 @@
 		{
 			name: "independentmodulestatic",
 			edge: annotated{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
-			expectedDepActions: []string{
-				"apacheBin.meta_lic:gplWithClasspathException.meta_lic:restricted_with_classpath_exception",
-				"gplWithClasspathException.meta_lic:gplWithClasspathException.meta_lic:restricted_with_classpath_exception",
-			},
+			expectedDepActions: []string{},
 			expectedTargetConditions: []string{},
 		},
 		{
 			name: "dependentmodule",
 			edge: annotated{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
-			expectedDepActions: []string{
-				"dependentModule.meta_lic:gplWithClasspathException.meta_lic:restricted_with_classpath_exception",
-				"gplWithClasspathException.meta_lic:gplWithClasspathException.meta_lic:restricted_with_classpath_exception",
-			},
+			expectedDepActions: []string{},
 			expectedTargetConditions: []string{},
 		},
 
@@ -166,13 +162,13 @@
 			name:                     "independentmodulereversestatic",
 			edge:                     annotated{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
 			expectedDepActions:       []string{},
-			expectedTargetConditions: []string{"gplWithClasspathException.meta_lic:restricted_with_classpath_exception"},
+			expectedTargetConditions: []string{},
 		},
 		{
 			name:                     "dependentmodulereverse",
 			edge:                     annotated{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
 			expectedDepActions:       []string{},
-			expectedTargetConditions: []string{"gplWithClasspathException.meta_lic:restricted_with_classpath_exception"},
+			expectedTargetConditions: []string{},
 		},
 		{
 			name: "ponr",
@@ -216,7 +212,7 @@
 	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			fs := make(testFS)
+			fs := make(testfs.TestFS)
 			stderr := &bytes.Buffer{}
 			target := meta[tt.edge.target] + fmt.Sprintf("deps: {\n  file: \"%s\"\n", tt.edge.dep)
 			for _, ann := range tt.edge.annotations {
@@ -257,9 +253,9 @@
 						otherCs := otn.LicenseConditions()
 						depConditions |= otherCs
 					}
-					t.Logf("calculate target actions for edge=%s, dep conditions=%04x, treatAsAggregate=%v", edge.String(), depConditions, tt.treatAsAggregate)
+					t.Logf("calculate target actions for edge=%s, dep conditions=%#v %s, treatAsAggregate=%v", edge.String(), depConditions, depConditions, tt.treatAsAggregate)
 					csActual := depConditionsPropagatingToTarget(lg, edge, depConditions, tt.treatAsAggregate)
-					t.Logf("calculated target conditions as %04x{%s}", csActual, strings.Join(csActual.Names(), ", "))
+					t.Logf("calculated target conditions as %#v %s", csActual, csActual)
 					csExpected := NewLicenseConditionSet()
 					for _, triple := range tt.expectedDepActions {
 						fields := strings.Split(triple, ":")
@@ -269,9 +265,9 @@
 						}
 						csExpected |= expectedConditions
 					}
-					t.Logf("expected target conditions as %04x{%s}", csExpected, strings.Join(csExpected.Names(), ", "))
+					t.Logf("expected target conditions as %#v %s", csExpected, csExpected)
 					if csActual != csExpected {
-						t.Errorf("unexpected license conditions: got %04x, want %04x", csActual, csExpected)
+						t.Errorf("unexpected license conditions: got %#v, want %#v", csActual, csExpected)
 					}
 				})
 			}
diff --git a/tools/compliance/policy_resolve.go b/tools/compliance/policy_resolve.go
index d357aec..fc8ed4c 100644
--- a/tools/compliance/policy_resolve.go
+++ b/tools/compliance/policy_resolve.go
@@ -49,89 +49,71 @@
 func TraceBottomUpConditions(lg *LicenseGraph, conditionsFn TraceConditions) {
 
 	// short-cut if already walked and cached
-	lg.mu.Lock()
-	wg := lg.wgBU
+	lg.onceBottomUp.Do(func() {
+		// amap identifes targets previously walked. (guarded by mu)
+		amap := make(map[*TargetNode]struct{})
 
-	if wg != nil {
-		lg.mu.Unlock()
-		wg.Wait()
-		return
-	}
-	wg = &sync.WaitGroup{}
-	wg.Add(1)
-	lg.wgBU = wg
-	lg.mu.Unlock()
+		// mu guards concurrent access to amap
+		var mu sync.Mutex
 
-	// amap identifes targets previously walked. (guarded by mu)
-	amap := make(map[*TargetNode]struct{})
+		var walk func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet
 
-	// cmap identifies targets previously walked as pure aggregates. i.e. as containers
-	// (guarded by mu)
-	cmap := make(map[*TargetNode]struct{})
-	var mu sync.Mutex
+		walk = func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet {
+			priorWalkResults := func() (LicenseConditionSet, bool) {
+				mu.Lock()
+				defer mu.Unlock()
 
-	var walk func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet
+				if _, alreadyWalked := amap[target]; alreadyWalked {
+					if treatAsAggregate {
+						return target.resolution, true
+					}
+					if !target.pure {
+						return target.resolution, true
+					}
+					// previously walked in a pure aggregate context,
+					// needs to walk again in non-aggregate context
+				} else {
+					target.resolution |= conditionsFn(target)
+					amap[target] = struct{}{}
+				}
+				target.pure = treatAsAggregate
+				return target.resolution, false
+			}
+			cs, alreadyWalked := priorWalkResults()
+			if alreadyWalked {
+				return cs
+			}
 
-	walk = func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet {
-		priorWalkResults := func() (LicenseConditionSet, bool) {
+			c := make(chan LicenseConditionSet, len(target.edges))
+			// add all the conditions from all the dependencies
+			for _, edge := range target.edges {
+				go func(edge *TargetEdge) {
+					// walk dependency to get its conditions
+					cs := walk(edge.dependency, treatAsAggregate && edge.dependency.IsContainer())
+
+					// turn those into the conditions that apply to the target
+					cs = depConditionsPropagatingToTarget(lg, edge, cs, treatAsAggregate)
+
+					c <- cs
+				}(edge)
+			}
+			for i := 0; i < len(target.edges); i++ {
+				cs |= <-c
+			}
 			mu.Lock()
-			defer mu.Unlock()
+			target.resolution |= cs
+			mu.Unlock()
 
-			if _, alreadyWalked := amap[target]; alreadyWalked {
-				if treatAsAggregate {
-					return target.resolution, true
-				}
-				if _, asAggregate := cmap[target]; !asAggregate {
-					return target.resolution, true
-				}
-				// previously walked in a pure aggregate context,
-				// needs to walk again in non-aggregate context
-				delete(cmap, target)
-			} else {
-				target.resolution |= conditionsFn(target)
-				amap[target] = struct{}{}
-			}
-			if treatAsAggregate {
-				cmap[target] = struct{}{}
-			}
-			return target.resolution, false
-		}
-		cs, alreadyWalked := priorWalkResults()
-		if alreadyWalked {
+			// return conditions up the tree
 			return cs
 		}
 
-		c := make(chan LicenseConditionSet, len(target.edges))
-		// add all the conditions from all the dependencies
-		for _, edge := range target.edges {
-			go func(edge *TargetEdge) {
-				// walk dependency to get its conditions
-				cs := walk(edge.dependency, treatAsAggregate && edge.dependency.IsContainer())
-
-				// turn those into the conditions that apply to the target
-				cs = depConditionsPropagatingToTarget(lg, edge, cs, treatAsAggregate)
-
-				c <- cs
-			}(edge)
+		// walk each of the roots
+		for _, rname := range lg.rootFiles {
+			rnode := lg.targets[rname]
+			_ = walk(rnode, rnode.IsContainer())
 		}
-		for i := 0; i < len(target.edges); i++ {
-			cs |= <-c
-		}
-		mu.Lock()
-		target.resolution |= cs
-		mu.Unlock()
-
-		// return conditions up the tree
-		return cs
-	}
-
-	// walk each of the roots
-	for _, rname := range lg.rootFiles {
-		rnode := lg.targets[rname]
-		_ = walk(rnode, rnode.IsContainer())
-	}
-
-	wg.Done()
+	})
 }
 
 // ResolveTopDownConditions performs a top-down walk of the LicenseGraph
@@ -150,85 +132,76 @@
 func TraceTopDownConditions(lg *LicenseGraph, conditionsFn TraceConditions) {
 
 	// short-cut if already walked and cached
-	lg.mu.Lock()
-	wg := lg.wgTD
+	lg.onceTopDown.Do(func() {
+		wg := &sync.WaitGroup{}
+		wg.Add(1)
 
-	if wg != nil {
-		lg.mu.Unlock()
-		wg.Wait()
-		return
-	}
-	wg = &sync.WaitGroup{}
-	wg.Add(1)
-	lg.wgTD = wg
-	lg.mu.Unlock()
+		// start with the conditions propagated up the graph
+		TraceBottomUpConditions(lg, conditionsFn)
 
-	// start with the conditions propagated up the graph
-	TraceBottomUpConditions(lg, conditionsFn)
+		// amap contains the set of targets already walked. (guarded by mu)
+		amap := make(map[*TargetNode]struct{})
 
-	// amap contains the set of targets already walked. (guarded by mu)
-	amap := make(map[*TargetNode]struct{})
+		// mu guards concurrent access to amap
+		var mu sync.Mutex
 
-	// cmap contains the set of targets walked as pure aggregates. i.e. containers
-	// (guarded by mu)
-	cmap := make(map[*TargetNode]struct{})
+		var walk func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool)
 
-	// mu guards concurrent access to cmap
-	var mu sync.Mutex
-
-	var walk func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool)
-
-	walk = func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool) {
-		defer wg.Done()
-		mu.Lock()
-		fnode.resolution |= conditionsFn(fnode)
-		fnode.resolution |= cs
-		amap[fnode] = struct{}{}
-		if treatAsAggregate {
-			cmap[fnode] = struct{}{}
-		}
-		cs = fnode.resolution
-		mu.Unlock()
-		// for each dependency
-		for _, edge := range fnode.edges {
-			func(edge *TargetEdge) {
-				// dcs holds the dpendency conditions inherited from the target
-				dcs := targetConditionsPropagatingToDep(lg, edge, cs, treatAsAggregate, conditionsFn)
-				dnode := edge.dependency
+		walk = func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool) {
+			defer wg.Done()
+			continueWalk := func() bool {
 				mu.Lock()
 				defer mu.Unlock()
-				depcs := dnode.resolution
-				_, alreadyWalked := amap[dnode]
-				if !dcs.IsEmpty() && alreadyWalked {
-					if dcs.Difference(depcs).IsEmpty() {
+
+				depcs := fnode.resolution
+				_, alreadyWalked := amap[fnode]
+				if alreadyWalked {
+					if cs.IsEmpty() {
+						return false
+					}
+					if cs.Difference(depcs).IsEmpty() {
 						// no new conditions
 
 						// pure aggregates never need walking a 2nd time with same conditions
 						if treatAsAggregate {
-							return
+							return false
 						}
 						// non-aggregates don't need walking as non-aggregate a 2nd time
-						if _, asAggregate := cmap[dnode]; !asAggregate {
-							return
+						if !fnode.pure {
+							return false
 						}
 						// previously walked as pure aggregate; need to re-walk as non-aggregate
-						delete(cmap, dnode)
 					}
 				}
+				fnode.resolution |= conditionsFn(fnode)
+				fnode.resolution |= cs
+				fnode.pure = treatAsAggregate
+				amap[fnode] = struct{}{}
+				cs = fnode.resolution
+				return true
+			}()
+			if !continueWalk {
+				return
+			}
+			// for each dependency
+			for _, edge := range fnode.edges {
+				// dcs holds the dependency conditions inherited from the target
+				dcs := targetConditionsPropagatingToDep(lg, edge, cs, treatAsAggregate, conditionsFn)
+				dnode := edge.dependency
 				// add the conditions to the dependency
 				wg.Add(1)
 				go walk(dnode, dcs, treatAsAggregate && dnode.IsContainer())
-			}(edge)
+			}
 		}
-	}
 
-	// walk each of the roots
-	for _, rname := range lg.rootFiles {
-		rnode := lg.targets[rname]
-		wg.Add(1)
-		// add the conditions to the root and its transitive closure
-		go walk(rnode, NewLicenseConditionSet(), rnode.IsContainer())
-	}
-	wg.Done()
-	wg.Wait()
+		// walk each of the roots
+		for _, rname := range lg.rootFiles {
+			rnode := lg.targets[rname]
+			wg.Add(1)
+			// add the conditions to the root and its transitive closure
+			go walk(rnode, NewLicenseConditionSet(), rnode.IsContainer())
+		}
+		wg.Done()
+		wg.Wait()
+	})
 }
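The rewrite above drops the `lg.mu`/`lg.wgBU` dance in favor of `sync.Once`, which gives the same walk-once-and-cache semantics with far less bookkeeping: later callers block inside `Do` until the first walk finishes. A minimal sketch of the idiom, with a hypothetical `graph` type standing in for `LicenseGraph`:

```go
package main

import (
	"fmt"
	"sync"
)

// graph is a hypothetical stand-in for LicenseGraph: the walk result is
// computed at most once and cached.
type graph struct {
	once   sync.Once
	result int
}

// walkOnce runs the expensive traversal exactly once. Concurrent callers
// block inside Do until the first caller finishes, then all of them read
// the cached result -- the semantics the old mutex/WaitGroup code
// implemented by hand.
func (g *graph) walkOnce() int {
	g.once.Do(func() {
		g.result = 42 // stand-in for the real bottom-up traversal
	})
	return g.result
}

func main() {
	g := &graph{}
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			fmt.Println(g.walkOnce()) // always 42, computed once
		}()
	}
	wg.Wait()
}
```

Moving the `pure` flag onto the target node itself plays the same simplifying role: it replaces the separate `cmap` with state that travels with the node.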
diff --git a/tools/compliance/policy_resolve_test.go b/tools/compliance/policy_resolve_test.go
index f98e4cc..d6731fe 100644
--- a/tools/compliance/policy_resolve_test.go
+++ b/tools/compliance/policy_resolve_test.go
@@ -289,8 +289,8 @@
 				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
 			},
 			expectedActions: []tcond{
-				{"apacheBin.meta_lic", "notice|restricted_with_classpath_exception"},
-				{"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
+				{"apacheBin.meta_lic", "notice"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -300,8 +300,8 @@
 				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
 			},
 			expectedActions: []tcond{
-				{"dependentModule.meta_lic", "notice|restricted_with_classpath_exception"},
-				{"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
+				{"dependentModule.meta_lic", "notice"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -312,7 +312,7 @@
 			},
 			expectedActions: []tcond{
 				{"apacheBin.meta_lic", "notice"},
-				{"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -322,8 +322,8 @@
 				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []tcond{
-				{"dependentModule.meta_lic", "notice|restricted_with_classpath_exception"},
-				{"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
+				{"dependentModule.meta_lic", "notice"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 	}
@@ -593,9 +593,9 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []tcond{
-				{"apacheBin.meta_lic", "notice|restricted_with_classpath_exception"},
-				{"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
-				{"mitLib.meta_lic", "notice|restricted_with_classpath_exception"},
+				{"apacheBin.meta_lic", "notice"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
+				{"mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -606,9 +606,9 @@
 				{"dependentModule.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []tcond{
-				{"dependentModule.meta_lic", "notice|restricted_with_classpath_exception"},
-				{"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
-				{"mitLib.meta_lic", "notice|restricted_with_classpath_exception"},
+				{"dependentModule.meta_lic", "notice"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
+				{"mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -620,7 +620,7 @@
 			},
 			expectedActions: []tcond{
 				{"apacheBin.meta_lic", "notice"},
-				{"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
 				{"mitLib.meta_lic", "notice"},
 			},
 		},
@@ -632,9 +632,9 @@
 				{"dependentModule.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []tcond{
-				{"dependentModule.meta_lic", "notice|restricted_with_classpath_exception"},
-				{"gplWithClasspathException.meta_lic", "restricted_with_classpath_exception"},
-				{"mitLib.meta_lic", "notice|restricted_with_classpath_exception"},
+				{"dependentModule.meta_lic", "notice"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
+				{"mitLib.meta_lic", "notice"},
 			},
 		},
 	}
diff --git a/tools/compliance/policy_resolvenotices_test.go b/tools/compliance/policy_resolvenotices_test.go
index cd9dd71..92b0ce3 100644
--- a/tools/compliance/policy_resolvenotices_test.go
+++ b/tools/compliance/policy_resolvenotices_test.go
@@ -217,10 +217,10 @@
 			},
 			expectedResolutions: []res{
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"apacheBin.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 			},
 		},
 		{
@@ -245,7 +245,7 @@
 			expectedResolutions: []res{
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
 				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_allows_dynamic_linking"},
 			},
 		},
 		{
@@ -258,17 +258,17 @@
 			},
 			expectedResolutions: []res{
 				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"apacheContainer.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 				{"apacheContainer.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"apacheBin.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 			},
 		},
 		{
@@ -280,11 +280,11 @@
 			},
 			expectedResolutions: []res{
 				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 			},
 		},
 		{
@@ -309,7 +309,7 @@
 			expectedResolutions: []res{
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
 				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 			},
 		},
 		{
@@ -336,7 +336,7 @@
 				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
 				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 			},
 		},
 		{
@@ -363,7 +363,7 @@
 				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
 				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
 			},
 		},
 		{
@@ -375,10 +375,8 @@
 			},
 			expectedResolutions: []res{
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -390,10 +388,8 @@
 			},
 			expectedResolutions: []res{
 				{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 				{"dependentModule.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"dependentModule.meta_lic", "mitLib.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -418,7 +414,7 @@
 			expectedResolutions: []res{
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
 				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -430,9 +426,7 @@
 			},
 			expectedResolutions: []res{
 				{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
 				{"dependentModule.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"dependentModule.meta_lic", "mitLib.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -444,11 +438,8 @@
 			},
 			expectedResolutions: []res{
 				{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
 				{"dependentModule.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"dependentModule.meta_lic", "mitLib.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 	}
diff --git a/tools/compliance/policy_resolveshare_test.go b/tools/compliance/policy_resolveshare_test.go
index c451b86..cf88058 100644
--- a/tools/compliance/policy_resolveshare_test.go
+++ b/tools/compliance/policy_resolveshare_test.go
@@ -40,9 +40,7 @@
 			edges: []annotated{
 				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
 			},
-			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:  "independentmodulestaticrestricted",
@@ -50,10 +48,7 @@
 			edges: []annotated{
 				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
 			},
-			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:  "dependentmodulerestricted",
@@ -61,9 +56,7 @@
 			edges: []annotated{
 				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
 			},
-			expectedResolutions: []res{
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:  "dependentmodulerestrictedshipclasspath",
@@ -71,11 +64,7 @@
 			edges: []annotated{
 				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
 			},
-			expectedResolutions: []res{
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:  "lgplonfprestricted",
@@ -84,8 +73,8 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
-				{"lgplBin.meta_lic", "apacheLib.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_allows_dynamic_linking"},
+				{"lgplBin.meta_lic", "apacheLib.meta_lic", "lgplBin.meta_lic", "restricted_allows_dynamic_linking"},
 			},
 		},
 		{
@@ -95,7 +84,7 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_allows_dynamic_linking"},
 			},
 		},
 		{
@@ -105,7 +94,7 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_allows_dynamic_linking"},
 			},
 		},
 		{
@@ -185,9 +174,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
 			},
-			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:  "independentmodulereversestaticrestricted",
@@ -195,10 +182,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
 			},
-			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:  "dependentmodulereverserestricted",
@@ -206,9 +190,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
 			},
-			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:  "dependentmodulereverserestrictedshipdependent",
@@ -216,11 +198,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
 			},
-			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:  "ponrrestricted",
diff --git a/tools/compliance/policy_shareprivacyconflicts.go b/tools/compliance/policy_shareprivacyconflicts.go
index 279e179..947bb96 100644
--- a/tools/compliance/policy_shareprivacyconflicts.go
+++ b/tools/compliance/policy_shareprivacyconflicts.go
@@ -49,7 +49,11 @@
 
 	// size is the size of the result
 	size := 0
-	for _, cs := range combined {
+	for actsOn, cs := range combined {
+		if actsOn.pure && !actsOn.LicenseConditions().MatchesAnySet(ImpliesShared) {
+			// no need to share code to build "a distribution medium"
+			continue
+		}
 		size += cs.Intersection(ImpliesShared).Len() * cs.Intersection(ImpliesPrivate).Len()
 	}
 	if size == 0 {
@@ -57,6 +61,9 @@
 	}
 	result := make([]SourceSharePrivacyConflict, 0, size)
 	for actsOn, cs := range combined {
+		if actsOn.pure { // no need to share code for "a distribution medium"
+			continue
+		}
 		pconditions := cs.Intersection(ImpliesPrivate).AsList()
 		ssconditions := cs.Intersection(ImpliesShared).AsList()
 
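The capacity computation above works because each (shared-condition, privacy-condition) pair acting on the same target is exactly one conflict, so the count is the product of the two intersection sizes. A toy sketch with plain bitmasks (hypothetical bit assignments; the real code uses `LicenseConditionSet`):

```go
package main

import (
	"fmt"
	"math/bits"
)

// Toy stand-ins for license conditions (hypothetical bit assignments).
const (
	restricted  = 1 << iota // shared-source family
	reciprocal              // shared-source family
	proprietary             // privacy family
)

func main() {
	impliesShared := uint(restricted | reciprocal)
	impliesPrivate := uint(proprietary)

	// conditions resolved to a single target
	cs := uint(restricted | reciprocal | proprietary)

	nShared := bits.OnesCount(cs & impliesShared)   // 2
	nPrivate := bits.OnesCount(cs & impliesPrivate) // 1

	// one conflict per (shared, private) pair
	fmt.Println(nShared * nPrivate) // 2
}
```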
diff --git a/tools/compliance/policy_walk.go b/tools/compliance/policy_walk.go
index f4d7bba..beb6d53 100644
--- a/tools/compliance/policy_walk.go
+++ b/tools/compliance/policy_walk.go
@@ -45,7 +45,7 @@
 }
 
 // VisitNode is called for each root and for each walked dependency node by
-// WalkTopDown. When VisitNode returns true, WalkTopDown will proceed to walk
+// WalkTopDown and WalkTopDownBreadthFirst. When VisitNode returns true, the walk proceeds
 // down the dependencies of the node
 type VisitNode func(lg *LicenseGraph, target *TargetNode, path TargetEdgePath) bool
 
@@ -79,6 +79,54 @@
 	}
 }
 
+// WalkTopDownBreadthFirst performs a breadth-first top-down walk of `lg` calling `visit` and descending
+// into dependencies when `visit` returns true.
+func WalkTopDownBreadthFirst(ctx EdgeContextProvider, lg *LicenseGraph, visit VisitNode) {
+	path := NewTargetEdgePath(32)
+
+	var walk func(fnode *TargetNode)
+	walk = func(fnode *TargetNode) {
+		edgesToWalk := make(TargetEdgeList, 0, len(fnode.edges))
+		for _, edge := range fnode.edges {
+			var edgeContext interface{}
+			if ctx == nil {
+				edgeContext = nil
+			} else {
+				edgeContext = ctx.Context(lg, *path, edge)
+			}
+			path.Push(edge, edgeContext)
+			if visit(lg, edge.dependency, *path) {
+				edgesToWalk = append(edgesToWalk, edge)
+			}
+			path.Pop()
+		}
+
+		for _, edge := range edgesToWalk {
+			var edgeContext interface{}
+			if ctx == nil {
+				edgeContext = nil
+			} else {
+				edgeContext = ctx.Context(lg, *path, edge)
+			}
+			path.Push(edge, edgeContext)
+			walk(edge.dependency)
+			path.Pop()
+		}
+	}
+
+	path.Clear()
+	rootsToWalk := make([]*TargetNode, 0, len(lg.rootFiles))
+	for _, r := range lg.rootFiles {
+		if visit(lg, lg.targets[r], *path) {
+			rootsToWalk = append(rootsToWalk, lg.targets[r])
+		}
+	}
+
+	for _, rnode := range rootsToWalk {
+		walk(rnode)
+	}
+}
+
 // resolutionKey identifies results from walking a specific target for a
 // specific set of conditions.
 type resolutionKey struct {
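`WalkTopDownBreadthFirst` visits all of a node's direct dependencies before descending into any of them, which is why the test expectations below list siblings ahead of grandchildren. A minimal sketch of that two-phase shape, with a hypothetical `node` type and no edge contexts:

```go
package main

import "fmt"

// node is a hypothetical stand-in for TargetNode.
type node struct {
	name string
	deps []*node
}

// walkBFS mirrors the two-phase shape of WalkTopDownBreadthFirst: visit
// every dependency of fnode first, then recurse into the ones the
// visitor accepted.
func walkBFS(fnode *node, visit func(*node) bool) {
	var toWalk []*node
	for _, d := range fnode.deps {
		if visit(d) {
			toWalk = append(toWalk, d)
		}
	}
	for _, d := range toWalk {
		walkBFS(d, visit)
	}
}

func main() {
	c := &node{name: "libc.a"}
	a := &node{name: "liba.so", deps: []*node{c}}
	b := &node{name: "libb.so"}
	root := &node{name: "bin", deps: []*node{a, b}}

	visit := func(n *node) bool {
		fmt.Println(n.name)
		return true
	}
	if visit(root) {
		walkBFS(root, visit)
	}
	// Output: bin, liba.so, libb.so, libc.a -- siblings before grandchildren
}
```

Returning false from the visitor keeps the edge out of `edgesToWalk` and prunes the subtree, which the deduplicating test below exploits.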
diff --git a/tools/compliance/policy_walk_test.go b/tools/compliance/policy_walk_test.go
index 92867f9..0bc37f8 100644
--- a/tools/compliance/policy_walk_test.go
+++ b/tools/compliance/policy_walk_test.go
@@ -16,9 +16,22 @@
 
 import (
 	"bytes"
+	"fmt"
+	"os"
+	"strings"
 	"testing"
 )
 
+func TestMain(m *testing.M) {
+	// Change into the cmd directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir("cmd"); err != nil {
+		fmt.Printf("failed to change to cmd directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
 func TestWalkResolutionsForCondition(t *testing.T) {
 	tests := []struct {
 		name                string
@@ -104,8 +117,7 @@
 			},
 			expectedResolutions: []res{
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -115,10 +127,7 @@
 			edges: []annotated{
 				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
 			},
-			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:      "dependentmodulenotice",
@@ -129,7 +138,6 @@
 			},
 			expectedResolutions: []res{
 				{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -139,9 +147,7 @@
 			edges: []annotated{
 				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
 			},
-			expectedResolutions: []res{
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:      "lgplonfpnotice",
@@ -347,7 +353,7 @@
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -357,9 +363,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
 			},
-			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:      "independentmodulereverserestrictedshipped",
@@ -368,9 +372,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
 			},
-			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:      "independentmodulereversestaticnotice",
@@ -380,9 +382,8 @@
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -392,10 +393,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
 			},
-			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:      "dependentmodulereversenotice",
@@ -405,7 +403,7 @@
 				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -415,9 +413,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
 			},
-			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:      "dependentmodulereverserestrictedshipped",
@@ -426,11 +422,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
 			},
-			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedResolutions: []res{},
 		},
 		{
 			name:      "ponrnotice",
@@ -716,8 +708,7 @@
 			},
 			expectedActions: []act{
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -727,10 +718,7 @@
 			edges: []annotated{
 				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
 			},
-			expectedActions: []act{
-				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedActions: []act{},
 		},
 		{
 			name:      "dependentmodulenotice",
@@ -741,7 +729,6 @@
 			},
 			expectedActions: []act{
 				{"dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
-				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -751,9 +738,7 @@
 			edges: []annotated{
 				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
 			},
-			expectedActions: []act{
-				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedActions: []act{},
 		},
 		{
 			name:      "lgplonfpnotice",
@@ -956,7 +941,7 @@
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -966,9 +951,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
 			},
-			expectedActions: []act{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedActions: []act{},
 		},
 		{
 			name:      "independentmodulereverserestrictedshipped",
@@ -977,9 +960,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
 			},
-			expectedActions: []act{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedActions: []act{},
 		},
 		{
 			name:      "independentmodulereversestaticnotice",
@@ -989,9 +970,8 @@
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -1001,10 +981,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
 			},
-			expectedActions: []act{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedActions: []act{},
 		},
 		{
 			name:      "dependentmodulereversenotice",
@@ -1014,7 +991,7 @@
 				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -1024,9 +1001,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
 			},
-			expectedActions: []act{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedActions: []act{},
 		},
 		{
 			name:      "dependentmodulereverserestrictedshipped",
@@ -1035,10 +1010,7 @@
 			edges: []annotated{
 				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
 			},
-			expectedActions: []act{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "restricted"},
-			},
+			expectedActions: []act{},
 		},
 		{
 			name:      "ponrnotice",
@@ -1238,3 +1210,417 @@
 		})
 	}
 }
+
+func TestWalkTopDownBreadthFirst(t *testing.T) {
+	tests := []struct {
+		name           string
+		roots          []string
+		edges          []annotated
+		expectedResult []string
+	}{
+		{
+			name:  "bin/bin1",
+			roots: []string{"bin/bin1.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+			},
+		},
+		{
+			name:  "bin/bin2",
+			roots: []string{"bin/bin2.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			name:  "bin/bin3",
+			roots: []string{"bin/bin3.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/bin/bin3.meta_lic",
+			},
+		},
+		{
+			name:  "lib/liba.so",
+			roots: []string{"lib/liba.so.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/lib/liba.so.meta_lic",
+			},
+		},
+		{
+			name:  "lib/libb.so",
+			roots: []string{"lib/libb.so.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/lib/libb.so.meta_lic",
+			},
+		},
+		{
+			name:  "lib/libc.a",
+			roots: []string{"lib/libc.a.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/lib/libc.a.meta_lic",
+			},
+		},
+		{
+			name:  "lib/libd.so",
+			roots: []string{"lib/libd.so.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			name:  "highest.apex",
+			roots: []string{"highest.apex.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/highest.apex.meta_lic",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			name:  "container.zip",
+			roots: []string{"container.zip.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/container.zip.meta_lic",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			name:  "application",
+			roots: []string{"application.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/application.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+			},
+		},
+		{
+			name:  "bin/bin1&lib/liba",
+			roots: []string{"bin/bin1.meta_lic","lib/liba.so.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+			},
+		},
+		{
+			name:  "bin/bin2&lib/libd",
+			roots: []string{"bin/bin2.meta_lic", "lib/libd.so.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			name:  "application&bin/bin3",
+			roots: []string{"application.meta_lic", "bin/bin3.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/application.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+			},
+		},
+		{
+			name:  "highest.apex&container.zip",
+			roots: []string{"highest.apex.meta_lic", "container.zip.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/highest.apex.meta_lic",
+				"testdata/notice/container.zip.meta_lic",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			stderr := &bytes.Buffer{}
+			actualOut := &bytes.Buffer{}
+
+			rootFiles := make([]string, 0, len(tt.roots))
+			for _, r := range tt.roots {
+				rootFiles = append(rootFiles, "testdata/notice/"+r)
+			}
+
+			lg, err := ReadLicenseGraph(GetFS(""), stderr, rootFiles)
+
+			if err != nil {
+				t.Errorf("unexpected test data error: got %s, want no error", err)
+				return
+			}
+
+			expectedRst := tt.expectedResult
+
+			WalkTopDownBreadthFirst(nil, lg, func(lg *LicenseGraph, tn *TargetNode, path TargetEdgePath) bool {
+				fmt.Fprintln(actualOut, tn.Name())
+				return true
+			})
+
+			actualRst := strings.Split(actualOut.String(), "\n")
+
+			if len(actualRst) > 0 {
+				actualRst = actualRst[:len(actualRst)-1]
+			}
+
+			t.Logf("actual nodes visited: %s", actualOut.String())
+			t.Logf("expected nodes visited: %s", strings.Join(expectedRst, "\n"))
+
+			if len(actualRst) != len(expectedRst) {
+				t.Errorf("WalkTopDownBreadthFirst: number of visited nodes is different: got %d, want %d", len(actualRst), len(expectedRst))
+			}
+
+			for i := 0; i < len(actualRst) && i < len(expectedRst); i++ {
+				if actualRst[i] != expectedRst[i] {
+					t.Errorf("WalkTopDownBreadthFirst: lines differ at index %d: got %q, want %q", i, actualRst[i], expectedRst[i])
+					break
+				}
+			}
+
+			if len(actualRst) < len(expectedRst) {
+				t.Errorf("WalkTopDownBreadthFirst: extra lines at %d: got %q, want nothing", len(actualRst), expectedRst[len(actualRst)])
+			}
+
+			if len(expectedRst) < len(actualRst) {
+				t.Errorf("WalkTopDownBreadthFirst: missing lines at %d: got nothing, want %q", len(expectedRst), actualRst[len(expectedRst)])
+			}
+		})
+	}
+}
+
+func TestWalkTopDownBreadthFirstWithoutDuplicates(t *testing.T) {
+	tests := []struct {
+		name           string
+		roots          []string
+		edges          []annotated
+		expectedResult []string
+	}{
+		{
+			name:  "bin/bin1",
+			roots: []string{"bin/bin1.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+			},
+		},
+		{
+			name:  "bin/bin2",
+			roots: []string{"bin/bin2.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			name:  "bin/bin3",
+			roots: []string{"bin/bin3.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/bin/bin3.meta_lic",
+			},
+		},
+		{
+			name:  "lib/liba.so",
+			roots: []string{"lib/liba.so.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/lib/liba.so.meta_lic",
+			},
+		},
+		{
+			name:  "lib/libb.so",
+			roots: []string{"lib/libb.so.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/lib/libb.so.meta_lic",
+			},
+		},
+		{
+			name:  "lib/libc.a",
+			roots: []string{"lib/libc.a.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/lib/libc.a.meta_lic",
+			},
+		},
+		{
+			name:  "lib/libd.so",
+			roots: []string{"lib/libd.so.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			name:  "highest.apex",
+			roots: []string{"highest.apex.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/highest.apex.meta_lic",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			name:  "container.zip",
+			roots: []string{"container.zip.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/container.zip.meta_lic",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			name:  "application",
+			roots: []string{"application.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/application.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+			},
+		},
+		{
+			name:  "bin/bin1&lib/liba",
+			roots: []string{"bin/bin1.meta_lic", "lib/liba.so.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+			},
+		},
+		{
+			name:  "bin/bin2&lib/libd",
+			roots: []string{"bin/bin2.meta_lic", "lib/libd.so.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+			},
+		},
+		{
+			name:  "application&bin/bin3",
+			roots: []string{"application.meta_lic", "bin/bin3.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/application.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+			},
+		},
+		{
+			name:  "highest.apex&container.zip",
+			roots: []string{"highest.apex.meta_lic", "container.zip.meta_lic"},
+			expectedResult: []string{
+				"testdata/notice/highest.apex.meta_lic",
+				"testdata/notice/container.zip.meta_lic",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			stderr := &bytes.Buffer{}
+			actualOut := &bytes.Buffer{}
+
+			rootFiles := make([]string, 0, len(tt.roots))
+			for _, r := range tt.roots {
+				rootFiles = append(rootFiles, "testdata/notice/"+r)
+			}
+
+			lg, err := ReadLicenseGraph(GetFS(""), stderr, rootFiles)
+
+			if err != nil {
+				t.Errorf("unexpected test data error: got %s, want no error", err)
+				return
+			}
+
+			expectedRst := tt.expectedResult
+
+			// Keep track of the visited nodes.
+			// Only add to actualOut if not yet visited.
+			visitedNodes := make(map[string]struct{})
+			WalkTopDownBreadthFirst(nil, lg, func(lg *LicenseGraph, tn *TargetNode, path TargetEdgePath) bool {
+				if _, alreadyVisited := visitedNodes[tn.Name()]; alreadyVisited {
+					return false
+				}
+				fmt.Fprintln(actualOut, tn.Name())
+				visitedNodes[tn.Name()] = struct{}{}
+				return true
+			})
+
+			actualRst := strings.Split(actualOut.String(), "\n")
+
+			if len(actualRst) > 0 {
+				actualRst = actualRst[:len(actualRst)-1]
+			}
+
+			t.Logf("actual nodes visited: %s", actualOut.String())
+			t.Logf("expected nodes visited: %s", strings.Join(expectedRst, "\n"))
+
+			if len(actualRst) != len(expectedRst) {
+				t.Errorf("WalkTopDownBreadthFirst: number of visited nodes is different: got %d, want %d", len(actualRst), len(expectedRst))
+			}
+
+			for i := 0; i < len(actualRst) && i < len(expectedRst); i++ {
+				if actualRst[i] != expectedRst[i] {
+					t.Errorf("WalkTopDownBreadthFirst: lines differ at index %d: got %q, want %q", i, actualRst[i], expectedRst[i])
+					break
+				}
+			}
+
+			if len(actualRst) < len(expectedRst) {
+				t.Errorf("WalkTopDownBreadthFirst: extra lines at %d: got %q, want nothing", len(actualRst), expectedRst[len(actualRst)])
+			}
+
+			if len(expectedRst) < len(actualRst) {
+				t.Errorf("WalkTopDownBreadthFirst: missing lines at %d: got nothing, want %q", len(expectedRst), actualRst[len(expectedRst)])
+			}
+		})
+	}
+}
diff --git a/tools/compliance/projectmetadata/Android.bp b/tools/compliance/projectmetadata/Android.bp
new file mode 100644
index 0000000..dccff76
--- /dev/null
+++ b/tools/compliance/projectmetadata/Android.bp
@@ -0,0 +1,34 @@
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+bootstrap_go_package {
+    name: "projectmetadata-module",
+    srcs: [
+        "projectmetadata.go",
+    ],
+    deps: [
+        "compliance-test-fs-module",
+        "golang-protobuf-proto",
+        "golang-protobuf-encoding-prototext",
+        "project_metadata_proto",
+    ],
+    testSrcs: [
+        "projectmetadata_test.go",
+    ],
+    pkgPath: "android/soong/tools/compliance/projectmetadata",
+}
diff --git a/tools/compliance/projectmetadata/projectmetadata.go b/tools/compliance/projectmetadata/projectmetadata.go
new file mode 100644
index 0000000..f0ad7b5
--- /dev/null
+++ b/tools/compliance/projectmetadata/projectmetadata.go
@@ -0,0 +1,287 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package projectmetadata
+
+import (
+	"fmt"
+	"io"
+	"io/fs"
+	"path/filepath"
+	"strings"
+	"sync"
+
+	"android/soong/compliance/project_metadata_proto"
+
+	"google.golang.org/protobuf/encoding/prototext"
+)
+
+var (
+	// ConcurrentReaders is the size of the task pool for limiting resource usage e.g. open files.
+	ConcurrentReaders = 5
+)
+
+// ProjectMetadata contains the METADATA for a git project.
+type ProjectMetadata struct {
+	proto project_metadata_proto.Metadata
+
+	// project is the path to the directory containing the METADATA file.
+	project string
+}
+
+// ProjectUrlMap maps url type name to url value
+type ProjectUrlMap map[string]string
+
+// DownloadUrl returns the address of a download location
+func (m ProjectUrlMap) DownloadUrl() string {
+	for _, urlType := range []string{"GIT", "SVN", "HG", "DARCS"} {
+		if url, ok := m[urlType]; ok {
+			return url
+		}
+	}
+	return ""
+}
+
+// String returns a string representation of the metadata for error messages.
+func (pm *ProjectMetadata) String() string {
+	return fmt.Sprintf("project: %q\n%s", pm.project, pm.proto.String())
+}
+
+// Name returns the name of the project.
+func (pm *ProjectMetadata) Name() string {
+	return pm.proto.GetName()
+}
+
+// Version returns the version of the project if available.
+func (pm *ProjectMetadata) Version() string {
+	tp := pm.proto.GetThirdParty()
+	if tp != nil {
+		version := tp.GetVersion()
+		return version
+	}
+	return ""
+}
+
+// VersionedName returns the name of the project including the version if any.
+func (pm *ProjectMetadata) VersionedName() string {
+	name := pm.proto.GetName()
+	if name != "" {
+		tp := pm.proto.GetThirdParty()
+		if tp != nil {
+			version := tp.GetVersion()
+			if version != "" {
+				if version[0] == 'v' || version[0] == 'V' {
+					return name + "_" + version
+				} else {
+					return name + "_v_" + version
+				}
+			}
+		}
+		return name
+	}
+	return pm.proto.GetDescription()
+}
+
+// UrlsByTypeName returns a map of URLs by Type Name
+func (pm *ProjectMetadata) UrlsByTypeName() ProjectUrlMap {
+	tp := pm.proto.GetThirdParty()
+	if tp == nil {
+		return nil
+	}
+	if len(tp.Url) == 0 {
+		return nil
+	}
+	urls := make(ProjectUrlMap)
+
+	for _, url := range tp.Url {
+		uri := url.GetValue()
+		if uri == "" {
+			continue
+		}
+		urls[project_metadata_proto.URL_Type_name[int32(url.GetType())]] = uri
+	}
+	return urls
+}
+
+// projectIndex describes a project to be read; after `wait()` returns, it holds either
+// a `ProjectMetadata` in `pm` (which can be nil even without an error) or a non-nil `err`.
+type projectIndex struct {
+	project string
+	path    string
+	pm      *ProjectMetadata
+	err     error
+	done    chan struct{}
+}
+
+// finish marks the task to read the `projectIndex` completed.
+func (pi *projectIndex) finish() {
+	close(pi.done)
+}
+
+// wait suspends execution until the `projectIndex` task completes.
+func (pi *projectIndex) wait() {
+	<-pi.done
+}
+
+// Index reads and caches ProjectMetadata (thread safe)
+type Index struct {
+	// projects maps project name to a `projectIndex`; the entry's `done` channel
+	// closes once the read completes with either a `ProjectMetadata` or an `error`.
+	projects sync.Map
+
+	// task provides a fixed-size task pool to limit concurrent open files etc.
+	task chan bool
+
+	// rootFS locates the root of the file system from which to read the files.
+	rootFS fs.FS
+}
+
+// NewIndex constructs a project metadata `Index` for the given file system.
+func NewIndex(rootFS fs.FS) *Index {
+	ix := &Index{task: make(chan bool, ConcurrentReaders), rootFS: rootFS}
+	for i := 0; i < ConcurrentReaders; i++ {
+		ix.task <- true
+	}
+	return ix
+}
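+
+// Illustrative note: the buffered `task` channel is a counting semaphore.
+// NewIndex seeds it with ConcurrentReaders tokens; each reader receives a
+// token before opening a file and sends it back when done, capping the
+// number of files open at once. The same idiom in isolation (sketch):
+//
+//	sem := make(chan bool, n)
+//	for i := 0; i < n; i++ {
+//		sem <- true
+//	}
+//	<-sem                          // acquire before the guarded work
+//	defer func() { sem <- true }() // release when finished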
+
+// MetadataForProjects returns 0..n ProjectMetadata for n `projects`, or an error.
+// Each project that has a METADATA.android or a METADATA file in the root of the project will have
+// a corresponding ProjectMetadata in the result. Projects with neither file get skipped. A nil
+// result with no error indicates none of the given `projects` has a METADATA file.
+// (thread safe -- can be called concurrently from multiple goroutines)
+func (ix *Index) MetadataForProjects(projects ...string) ([]*ProjectMetadata, error) {
+	if ConcurrentReaders < 1 {
+		return nil, fmt.Errorf("need at least one task in project metadata pool")
+	}
+	if len(projects) == 0 {
+		return nil, nil
+	}
+	// Identify the projects that have never been read
+	projectsToRead := make([]*projectIndex, 0, len(projects))
+	projectIndexes := make([]*projectIndex, 0, len(projects))
+	for _, p := range projects {
+		pi, loaded := ix.projects.LoadOrStore(p, &projectIndex{project: p, done: make(chan struct{})})
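+		// Only the goroutine that stored the entry reads the file; later callers share it and wait on `done`.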
+		if !loaded {
+			projectsToRead = append(projectsToRead, pi.(*projectIndex))
+		}
+		projectIndexes = append(projectIndexes, pi.(*projectIndex))
+	}
+	// findMeta locates and reads the appropriate METADATA file, if any.
+	findMeta := func(pi *projectIndex) {
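+		// Take a token from the pool; return it and mark the read complete on exit.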
+		<-ix.task
+		defer func() {
+			ix.task <- true
+			pi.finish()
+		}()
+
+		// Support METADATA.android for projects that already have a different sort of METADATA file.
+		path := filepath.Join(pi.project, "METADATA.android")
+		fi, err := fs.Stat(ix.rootFS, path)
+		if err == nil && fi.Mode().IsRegular() {
+			ix.readMetadataFile(pi, path)
+			return
+		}
+		// No METADATA.android; try the METADATA file.
+		path = filepath.Join(pi.project, "METADATA")
+		fi, err = fs.Stat(ix.rootFS, path)
+		if err == nil && fi.Mode().IsRegular() {
+			ix.readMetadataFile(pi, path)
+			return
+		}
+		// No METADATA file exists -- leave pm nil and finish.
+	}
+	// Read the METADATA files for the newly-seen projects concurrently.
+	for _, p := range projectsToRead {
+		go findMeta(p)
+	}
+	// Wait until all of the projects have been read.
+	var msg strings.Builder
+	result := make([]*ProjectMetadata, 0, len(projects))
+	for _, pi := range projectIndexes {
+		pi.wait()
+		// Combine any errors into a single error.
+		if pi.err != nil {
+			fmt.Fprintf(&msg, "  %v\n", pi.err)
+		} else if pi.pm != nil {
+			result = append(result, pi.pm)
+		}
+	}
+	if msg.Len() > 0 {
+		return nil, fmt.Errorf("error reading project(s):\n%s", msg.String())
+	}
+	if len(result) == 0 {
+		return nil, nil
+	}
+	return result, nil
+}
+
+// AllMetadataFiles returns the list of all METADATA files read thus far.
+func (ix *Index) AllMetadataFiles() []string {
+	var files []string
+	ix.projects.Range(func(key, value any) bool {
+		pi := value.(*projectIndex)
+		if pi.path != "" {
+			files = append(files, pi.path)
+		}
+		return true
+	})
+	return files
+}
+
+// readMetadataFile tries to read and parse a METADATA file at `path` for `project`.
+func (ix *Index) readMetadataFile(pi *projectIndex, path string) {
+	f, err := ix.rootFS.Open(path)
+	if err != nil {
+		pi.err = fmt.Errorf("error opening project %q metadata %q: %w", pi.project, path, err)
+		return
+	}
+	defer f.Close()
+
+	// read the file
+	data, err := io.ReadAll(f)
+	if err != nil {
+		pi.err = fmt.Errorf("error reading project %q metadata %q: %w", pi.project, path, err)
+		return
+	}
+
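+	// DiscardUnknown tolerates fields the compliance proto does not define.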
+	uo := prototext.UnmarshalOptions{DiscardUnknown: true}
+	pm := &ProjectMetadata{project: pi.project}
+	err = uo.Unmarshal(data, &pm.proto)
+	if err != nil {
+		pi.err = fmt.Errorf(`error in project %q METADATA %q: %v
+
+METADATA and METADATA.android files must parse as text protobufs
+defined by
+   build/soong/compliance/project_metadata_proto/project_metadata.proto
+
+* unknown fields don't matter
+* check for invalid ENUM names
+* check quoting
+* check for unescaped nested quotes
+* check that the protobuf comment marker is '#' not '//'
+
+if importing a library that uses a different sort of METADATA file, add
+a METADATA.android file beside it to parse instead
+`, pi.project, path, err)
+		return
+	}
+
+	pi.path = path
+	pi.pm = pm
+}
diff --git a/tools/compliance/projectmetadata/projectmetadata_test.go b/tools/compliance/projectmetadata/projectmetadata_test.go
new file mode 100644
index 0000000..0af0cd7
--- /dev/null
+++ b/tools/compliance/projectmetadata/projectmetadata_test.go
@@ -0,0 +1,722 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package projectmetadata
+
+import (
+	"fmt"
+	"strings"
+	"testing"
+
+	"android/soong/compliance/project_metadata_proto"
+	"android/soong/tools/compliance/testfs"
+)
+
+const (
+	// EMPTY represents a METADATA file with no recognized fields
+	EMPTY = ``
+
+	// INVALID_NAME represents a METADATA file with the wrong type of name
+	INVALID_NAME = `name: a library\n`
+
+	// INVALID_DESCRIPTION represents a METADATA file with the wrong type of description
+	INVALID_DESCRIPTION = `description: unquoted text\n`
+
+	// INVALID_VERSION represents a METADATA file with the wrong type of version
+	INVALID_VERSION = `third_party { version: 1 }`
+
+	// MY_LIB_1_0 represents a METADATA file for version 1.0 of mylib
+	MY_LIB_1_0 = `name: "mylib" description: "my library" third_party { version: "1.0" }`
+
+	// NO_NAME_0_1 represents a METADATA file with a description but no name
+	NO_NAME_0_1 = `description: "my library" third_party { version: "0.1" }`
+
+	// URL values per type
+	GIT_URL          = "http://example.github.com/my_lib"
+	SVN_URL          = "http://example.svn.com/my_lib"
+	HG_URL           = "http://example.hg.com/my_lib"
+	DARCS_URL        = "http://example.darcs.com/my_lib"
+	PIPER_URL        = "http://google3/third_party/my/package"
+	HOMEPAGE_URL     = "http://example.com/homepage"
+	OTHER_URL        = "http://google.com/"
+	ARCHIVE_URL      = "http://ftp.example.com/"
+	LOCAL_SOURCE_URL = "https://android.googlesource.com/platform/external/apache-http/"
+)
+
+// libWithUrl returns a METADATA file body with a url entry for each of the given types.
+func libWithUrl(urlTypes ...string) string {
+	var sb strings.Builder
+
+	fmt.Fprintln(&sb, `name: "mylib" description: "my library"
+	 third_party {
+	 	version: "1.0"`)
+
+	for _, urltype := range urlTypes {
+		var urlValue string
+		switch urltype {
+		case "GIT":
+			urlValue = GIT_URL
+		case "SVN":
+			urlValue = SVN_URL
+		case "HG":
+			urlValue = HG_URL
+		case "DARCS":
+			urlValue = DARCS_URL
+		case "PIPER":
+			urlValue = PIPER_URL
+		case "HOMEPAGE":
+			urlValue = HOMEPAGE_URL
+		case "OTHER":
+			urlValue = OTHER_URL
+		case "ARCHIVE":
+			urlValue = ARCHIVE_URL
+		case "LOCAL_SOURCE":
+			urlValue = LOCAL_SOURCE_URL
+		default:
+			panic(fmt.Errorf("unknown url type: %q. Please update libWithUrl() in build/make/tools/compliance/projectmetadata/projectmetadata_test.go", urltype))
+		}
+		fmt.Fprintf(&sb, "  url { type: %s value: %q }\n", urltype, urlValue)
+	}
+	fmt.Fprintln(&sb, `}`)
+
+	return sb.String()
+}
+
+func TestVerifyAllUrlTypes(t *testing.T) {
+	t.Run("verifyAllUrlTypes", func(t *testing.T) {
+		types := make([]string, 0, len(project_metadata_proto.URL_Type_value))
+		for typeName := range project_metadata_proto.URL_Type_value {
+			types = append(types, typeName)
+		}
+		libWithUrl(types...)
+	})
+}
+
+func TestUnknownPanics(t *testing.T) {
+	t.Run("Unknown panics", func(t *testing.T) {
+		defer func() {
+			if r := recover(); r == nil {
+				t.Errorf("unexpected success: got no error, want panic")
+			}
+		}()
+		libWithUrl("SOME WILD VALUE THAT DOES NOT EXIST")
+	})
+}
+
+func TestReadMetadataForProjects(t *testing.T) {
+	tests := []struct {
+		name          string
+		fs            *testfs.TestFS
+		projects      []string
+		expectedError string
+		expected      []pmeta
+	}{
+		{
+			name: "trivial",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte("name: \"Android\"\n"),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "Android",
+				name:          "Android",
+				version:       "",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "versioned",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(MY_LIB_1_0),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "lib_with_homepage",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("HOMEPAGE")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "lib_with_git",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("GIT")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   GIT_URL,
+			}},
+		},
+		{
+			name: "lib_with_svn",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("SVN")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   SVN_URL,
+			}},
+		},
+		{
+			name: "lib_with_hg",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("HG")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   HG_URL,
+			}},
+		},
+		{
+			name: "lib_with_darcs",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("DARCS")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   DARCS_URL,
+			}},
+		},
+		{
+			name: "lib_with_piper",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("PIPER")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "lib_with_other",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("OTHER")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "lib_with_local_source",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("LOCAL_SOURCE")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "lib_with_archive",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("ARCHIVE")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "lib_with_all_downloads",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("DARCS", "HG", "SVN", "GIT")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   GIT_URL,
+			}},
+		},
+		{
+			name: "lib_with_all_downloads_in_different_order",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("DARCS", "GIT", "SVN", "HG")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   GIT_URL,
+			}},
+		},
+		{
+			name: "lib_with_all_but_git",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("DARCS", "HG", "SVN")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   SVN_URL,
+			}},
+		},
+		{
+			name: "lib_with_all_but_git_and_svn",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("DARCS", "HG")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   HG_URL,
+			}},
+		},
+		{
+			name: "lib_with_all_nondownloads_and_git",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("HOMEPAGE", "LOCAL_SOURCE", "PIPER", "ARCHIVE", "GIT")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   GIT_URL,
+			}},
+		},
+		{
+			name: "lib_with_all_nondownloads",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl("HOMEPAGE", "LOCAL_SOURCE", "PIPER", "ARCHIVE")),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "lib_with_all_nondownloads",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(libWithUrl()),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "versioneddesc",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(NO_NAME_0_1),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "my library",
+				name:          "",
+				version:       "0.1",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "unterminated",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte("name: \"Android\n"),
+			},
+			projects:      []string{"/a"},
+			expectedError: `invalid character '\n' in string`,
+		},
+		{
+			name: "abc",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(EMPTY),
+				"/b/METADATA": []byte(MY_LIB_1_0),
+				"/c/METADATA": []byte(NO_NAME_0_1),
+			},
+			projects: []string{"/a", "/b", "/c"},
+			expected: []pmeta{
+				{
+					project:       "/a",
+					versionedName: "",
+					name:          "",
+					version:       "",
+					downloadUrl:   "",
+				},
+				{
+					project:       "/b",
+					versionedName: "mylib_v_1.0",
+					name:          "mylib",
+					version:       "1.0",
+					downloadUrl:   "",
+				},
+				{
+					project:       "/c",
+					versionedName: "my library",
+					name:          "",
+					version:       "0.1",
+					downloadUrl:   "",
+				},
+			},
+		},
+		{
+			name: "ab",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(EMPTY),
+				"/b/METADATA": []byte(MY_LIB_1_0),
+			},
+			projects: []string{"/a", "/b", "/c"},
+			expected: []pmeta{
+				{
+					project:       "/a",
+					versionedName: "",
+					name:          "",
+					version:       "",
+					downloadUrl:   "",
+				},
+				{
+					project:       "/b",
+					versionedName: "mylib_v_1.0",
+					name:          "mylib",
+					version:       "1.0",
+					downloadUrl:   "",
+				},
+			},
+		},
+		{
+			name: "ac",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(EMPTY),
+				"/c/METADATA": []byte(NO_NAME_0_1),
+			},
+			projects: []string{"/a", "/b", "/c"},
+			expected: []pmeta{
+				{
+					project:       "/a",
+					versionedName: "",
+					name:          "",
+					version:       "",
+					downloadUrl:   "",
+				},
+				{
+					project:       "/c",
+					versionedName: "my library",
+					name:          "",
+					version:       "0.1",
+					downloadUrl:   "",
+				},
+			},
+		},
+		{
+			name: "bc",
+			fs: &testfs.TestFS{
+				"/b/METADATA": []byte(MY_LIB_1_0),
+				"/c/METADATA": []byte(NO_NAME_0_1),
+			},
+			projects: []string{"/a", "/b", "/c"},
+			expected: []pmeta{
+				{
+					project:       "/b",
+					versionedName: "mylib_v_1.0",
+					name:          "mylib",
+					version:       "1.0",
+					downloadUrl:   "",
+				},
+				{
+					project:       "/c",
+					versionedName: "my library",
+					name:          "",
+					version:       "0.1",
+					downloadUrl:   "",
+				},
+			},
+		},
+		{
+			name: "wrongnametype",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(INVALID_NAME),
+			},
+			projects:      []string{"/a"},
+			expectedError: `invalid value for string type`,
+		},
+		{
+			name: "wrongdescriptiontype",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(INVALID_DESCRIPTION),
+			},
+			projects:      []string{"/a"},
+			expectedError: `invalid value for string type`,
+		},
+		{
+			name: "wrongversiontype",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(INVALID_VERSION),
+			},
+			projects:      []string{"/a"},
+			expectedError: `invalid value for string type`,
+		},
+		{
+			name: "wrongtype",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(INVALID_NAME + INVALID_DESCRIPTION + INVALID_VERSION),
+			},
+			projects:      []string{"/a"},
+			expectedError: `invalid value for string type`,
+		},
+		{
+			name: "empty",
+			fs: &testfs.TestFS{
+				"/a/METADATA": []byte(EMPTY),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "",
+				name:          "",
+				version:       "",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "emptyother",
+			fs: &testfs.TestFS{
+				"/a/METADATA.bp": []byte(EMPTY),
+			},
+			projects: []string{"/a"},
+		},
+		{
+			name:     "emptyfs",
+			fs:       &testfs.TestFS{},
+			projects: []string{"/a"},
+		},
+		{
+			name: "override",
+			fs: &testfs.TestFS{
+				"/a/METADATA":         []byte(INVALID_NAME + INVALID_DESCRIPTION + INVALID_VERSION),
+				"/a/METADATA.android": []byte(MY_LIB_1_0),
+			},
+			projects: []string{"/a"},
+			expected: []pmeta{{
+				project:       "/a",
+				versionedName: "mylib_v_1.0",
+				name:          "mylib",
+				version:       "1.0",
+				downloadUrl:   "",
+			}},
+		},
+		{
+			name: "enchilada",
+			fs: &testfs.TestFS{
+				"/a/METADATA":         []byte(INVALID_NAME + INVALID_DESCRIPTION + INVALID_VERSION),
+				"/a/METADATA.android": []byte(EMPTY),
+				"/b/METADATA":         []byte(MY_LIB_1_0),
+				"/c/METADATA":         []byte(NO_NAME_0_1),
+			},
+			projects: []string{"/a", "/b", "/c"},
+			expected: []pmeta{
+				{
+					project:       "/a",
+					versionedName: "",
+					name:          "",
+					version:       "",
+					downloadUrl:   "",
+				},
+				{
+					project:       "/b",
+					versionedName: "mylib_v_1.0",
+					name:          "mylib",
+					version:       "1.0",
+					downloadUrl:   "",
+				},
+				{
+					project:       "/c",
+					versionedName: "my library",
+					name:          "",
+					version:       "0.1",
+					downloadUrl:   "",
+				},
+			},
+		},
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			ix := NewIndex(tt.fs)
+			pms, err := ix.MetadataForProjects(tt.projects...)
+			if err != nil {
+				if len(tt.expectedError) == 0 {
+					t.Errorf("unexpected error: got %s, want no error", err)
+				} else if !strings.Contains(err.Error(), tt.expectedError) {
+					t.Errorf("unexpected error: got %s, want %q", err, tt.expectedError)
+				}
+				return
+			}
+			t.Logf("actual %d project metadata", len(pms))
+			for _, pm := range pms {
+				t.Logf("  %v", pm.String())
+			}
+			t.Logf("expected %d project metadata", len(tt.expected))
+			for _, pm := range tt.expected {
+				t.Logf("  %s", pm.String())
+			}
+			if len(tt.expectedError) > 0 {
+				t.Errorf("unexpected success: got no error, want %q err", tt.expectedError)
+				return
+			}
+			if len(pms) != len(tt.expected) {
+				t.Errorf("missing project metadata: got %d project metadata, want %d", len(pms), len(tt.expected))
+			}
+			for i := 0; i < len(pms) && i < len(tt.expected); i++ {
+				if msg := tt.expected[i].difference(pms[i]); msg != "" {
+					t.Errorf("unexpected metadata starting at index %d: %s", i, msg)
+					return
+				}
+			}
+			if len(pms) < len(tt.expected) {
+				t.Errorf("missing metadata starting at index %d: got nothing, want %s", len(pms), tt.expected[len(pms)].String())
+			}
+			if len(tt.expected) < len(pms) {
+				t.Errorf("unexpected metadata starting at index %d: got %s, want nothing", len(tt.expected), pms[len(tt.expected)].String())
+			}
+		})
+	}
+}
+
+type pmeta struct {
+	project       string
+	versionedName string
+	name          string
+	version       string
+	downloadUrl   string
+}
+
+func (pm pmeta) String() string {
+	return fmt.Sprintf("project: %q versionedName: %q name: %q version: %q downloadUrl: %q\n", pm.project, pm.versionedName, pm.name, pm.version, pm.downloadUrl)
+}
+
+func (pm pmeta) equals(other *ProjectMetadata) bool {
+	if pm.project != other.project {
+		return false
+	}
+	if pm.versionedName != other.VersionedName() {
+		return false
+	}
+	if pm.name != other.Name() {
+		return false
+	}
+	if pm.version != other.Version() {
+		return false
+	}
+	if pm.downloadUrl != other.UrlsByTypeName().DownloadUrl() {
+		return false
+	}
+	return true
+}
+
+func (pm pmeta) difference(other *ProjectMetadata) string {
+	if pm.equals(other) {
+		return ""
+	}
+	var sb strings.Builder
+	fmt.Fprintf(&sb, "got")
+	if pm.project != other.project {
+		fmt.Fprintf(&sb, " project: %q", other.project)
+	}
+	if pm.versionedName != other.VersionedName() {
+		fmt.Fprintf(&sb, " versionedName: %q", other.VersionedName())
+	}
+	if pm.name != other.Name() {
+		fmt.Fprintf(&sb, " name: %q", other.Name())
+	}
+	if pm.version != other.Version() {
+		fmt.Fprintf(&sb, " version: %q", other.Version())
+	}
+	if pm.downloadUrl != other.UrlsByTypeName().DownloadUrl() {
+		fmt.Fprintf(&sb, " downloadUrl: %q", other.UrlsByTypeName().DownloadUrl())
+	}
+	fmt.Fprintf(&sb, ", want")
+	if pm.project != other.project {
+		fmt.Fprintf(&sb, " project: %q", pm.project)
+	}
+	if pm.versionedName != other.VersionedName() {
+		fmt.Fprintf(&sb, " versionedName: %q", pm.versionedName)
+	}
+	if pm.name != other.Name() {
+		fmt.Fprintf(&sb, " name: %q", pm.name)
+	}
+	if pm.version != other.Version() {
+		fmt.Fprintf(&sb, " version: %q", pm.version)
+	}
+	if pm.downloadUrl != other.UrlsByTypeName().DownloadUrl() {
+		fmt.Fprintf(&sb, " downloadUrl: %q", pm.downloadUrl)
+	}
+	return sb.String()
+}
diff --git a/tools/compliance/readgraph.go b/tools/compliance/readgraph.go
index 7516440..bf364e6 100644
--- a/tools/compliance/readgraph.go
+++ b/tools/compliance/readgraph.go
@@ -34,10 +34,17 @@
 
 type globalFS struct{}
 
+var _ fs.FS = globalFS{}
+var _ fs.StatFS = globalFS{}
+
 func (s globalFS) Open(name string) (fs.File, error) {
 	return os.Open(name)
 }
 
+func (s globalFS) Stat(name string) (fs.FileInfo, error) {
+	return os.Stat(name)
+}
+
 var FS globalFS
 
 // GetFS returns a filesystem for accessing files under the OUT_DIR environment variable.
@@ -198,6 +205,9 @@
 
 	// resolution identifies the set of conditions resolved by acting on the target node.
 	resolution LicenseConditionSet
+
+	// pure indicates whether to treat the node as a pure aggregate (no internal linkage)
+	pure bool
 }
 
 // addDependencies converts the proto AnnotatedDependencies into `edges`
diff --git a/tools/compliance/readgraph_test.go b/tools/compliance/readgraph_test.go
index bcf9f39..a2fb04d 100644
--- a/tools/compliance/readgraph_test.go
+++ b/tools/compliance/readgraph_test.go
@@ -19,12 +19,14 @@
 	"sort"
 	"strings"
 	"testing"
+
+	"android/soong/tools/compliance/testfs"
 )
 
 func TestReadLicenseGraph(t *testing.T) {
 	tests := []struct {
 		name            string
-		fs              *testFS
+		fs              *testfs.TestFS
 		roots           []string
 		expectedError   string
 		expectedEdges   []edge
@@ -32,7 +34,7 @@
 	}{
 		{
 			name: "trivial",
-			fs: &testFS{
+			fs: &testfs.TestFS{
 				"app.meta_lic": []byte("package_name: \"Android\"\n"),
 			},
 			roots:           []string{"app.meta_lic"},
@@ -41,7 +43,7 @@
 		},
 		{
 			name: "unterminated",
-			fs: &testFS{
+			fs: &testfs.TestFS{
 				"app.meta_lic": []byte("package_name: \"Android\n"),
 			},
 			roots:         []string{"app.meta_lic"},
@@ -49,7 +51,7 @@
 		},
 		{
 			name: "danglingref",
-			fs: &testFS{
+			fs: &testfs.TestFS{
 				"app.meta_lic": []byte(AOSP + "deps: {\n  file: \"lib.meta_lic\"\n}\n"),
 			},
 			roots:         []string{"app.meta_lic"},
@@ -57,7 +59,7 @@
 		},
 		{
 			name: "singleedge",
-			fs: &testFS{
+			fs: &testfs.TestFS{
 				"app.meta_lic": []byte(AOSP + "deps: {\n  file: \"lib.meta_lic\"\n}\n"),
 				"lib.meta_lic": []byte(AOSP),
 			},
@@ -67,7 +69,7 @@
 		},
 		{
 			name: "fullgraph",
-			fs: &testFS{
+			fs: &testfs.TestFS{
 				"apex.meta_lic": []byte(AOSP + "deps: {\n  file: \"app.meta_lic\"\n}\ndeps: {\n  file: \"bin.meta_lic\"\n}\n"),
 				"app.meta_lic":  []byte(AOSP),
 				"bin.meta_lic":  []byte(AOSP + "deps: {\n  file: \"lib.meta_lic\"\n}\n"),
diff --git a/tools/compliance/resolutionset.go b/tools/compliance/resolutionset.go
index 7c8f333..1be4a34 100644
--- a/tools/compliance/resolutionset.go
+++ b/tools/compliance/resolutionset.go
@@ -72,6 +72,16 @@
 	return isPresent
 }
 
+// IsPureAggregate returns true if `target`, which must be in
+// `AttachesTo()`, resolves to a pure aggregate in the resolution.
+func (rs ResolutionSet) IsPureAggregate(target *TargetNode) bool {
+	_, isPresent := rs[target]
+	if !isPresent {
+		panic(fmt.Errorf("ResolutionSet.IsPureAggregate(%s): not attached to %s", target.Name(), target.Name()))
+	}
+	return target.pure
+}
+
 // Resolutions returns the list of resolutions that `attachedTo`
 // target must resolve. Returns empty list if no conditions apply.
 func (rs ResolutionSet) Resolutions(attachesTo *TargetNode) ResolutionList {
diff --git a/tools/compliance/test_util.go b/tools/compliance/test_util.go
index 26d7461..db711a7 100644
--- a/tools/compliance/test_util.go
+++ b/tools/compliance/test_util.go
@@ -17,10 +17,11 @@
 import (
 	"fmt"
 	"io"
-	"io/fs"
 	"sort"
 	"strings"
 	"testing"
+
+	"android/soong/tools/compliance/testfs"
 )
 
 const (
@@ -42,7 +43,7 @@
 	Classpath = `` +
 		`package_name: "Free Software"
 license_kinds: "SPDX-license-identifier-GPL-2.0-with-classpath-exception"
-license_conditions: "restricted"
+license_conditions: "permissive"
 `
 
 	// DependentModule starts a test metadata file for a module in the same package as `Classpath`.
@@ -56,7 +57,7 @@
 	LGPL = `` +
 		`package_name: "Free Library"
 license_kinds: "SPDX-license-identifier-LGPL-2.0"
-license_conditions: "restricted"
+license_conditions: "restricted_allows_dynamic_linking"
 `
 
 	// MPL starts a test metadata file for a module with MPL 2.0 reciprocal licensing.
@@ -145,51 +146,6 @@
 	return cs
 }
 
-// testFS implements a test file system (fs.FS) simulated by a map from filename to []byte content.
-type testFS map[string][]byte
-
-// Open implements fs.FS.Open() to open a file based on the filename.
-func (fs *testFS) Open(name string) (fs.File, error) {
-	if _, ok := (*fs)[name]; !ok {
-		return nil, fmt.Errorf("unknown file %q", name)
-	}
-	return &testFile{fs, name, 0}, nil
-}
-
-// testFile implements a test file (fs.File) based on testFS above.
-type testFile struct {
-	fs   *testFS
-	name string
-	posn int
-}
-
-// Stat not implemented to obviate implementing fs.FileInfo.
-func (f *testFile) Stat() (fs.FileInfo, error) {
-	return nil, fmt.Errorf("unimplemented")
-}
-
-// Read copies bytes from the testFS map.
-func (f *testFile) Read(b []byte) (int, error) {
-	if f.posn < 0 {
-		return 0, fmt.Errorf("file not open: %q", f.name)
-	}
-	if f.posn >= len((*f.fs)[f.name]) {
-		return 0, io.EOF
-	}
-	n := copy(b, (*f.fs)[f.name][f.posn:])
-	f.posn += n
-	return n, nil
-}
-
-// Close marks the testFile as no longer in use.
-func (f *testFile) Close() error {
-	if f.posn < 0 {
-		return fmt.Errorf("file already closed: %q", f.name)
-	}
-	f.posn = -1
-	return nil
-}
-
 // edge describes test data edges to define test graphs.
 type edge struct {
 	target, dep string
@@ -268,7 +224,7 @@
 			deps[edge.dep] = []annotated{}
 		}
 	}
-	fs := make(testFS)
+	fs := make(testfs.TestFS)
 	for file, edges := range deps {
 		body := meta[file]
 		for _, edge := range edges {
@@ -521,7 +477,7 @@
 			expectedConditions := expectedRl[i].Resolves()
 			actualConditions := actualRl[i].Resolves()
 			if expectedConditions != actualConditions {
-				t.Errorf("unexpected conditions apply to %q acting on %q: got %04x with names %s, want %04x with names %s",
+				t.Errorf("unexpected conditions apply to %q acting on %q: got %#v with names %s, want %#v with names %s",
 					target.name, expectedRl[i].actsOn.name,
 					actualConditions, actualConditions.Names(),
 					expectedConditions, expectedConditions.Names())
@@ -586,7 +542,7 @@
 			expectedConditions := expectedRl[i].Resolves()
 			actualConditions := actualRl[i].Resolves()
 			if expectedConditions != (expectedConditions & actualConditions) {
-				t.Errorf("expected conditions missing from %q acting on %q: got %04x with names %s, want %04x with names %s",
+				t.Errorf("expected conditions missing from %q acting on %q: got %#v with names %s, want %#v with names %s",
 					target.name, expectedRl[i].actsOn.name,
 					actualConditions, actualConditions.Names(),
 					expectedConditions, expectedConditions.Names())
diff --git a/tools/compliance/testfs/Android.bp b/tools/compliance/testfs/Android.bp
new file mode 100644
index 0000000..6baaf18
--- /dev/null
+++ b/tools/compliance/testfs/Android.bp
@@ -0,0 +1,25 @@
+// Copyright (C) 2022 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+bootstrap_go_package {
+    name: "compliance-test-fs-module",
+    srcs: [
+        "testfs.go",
+    ],
+    pkgPath: "android/soong/tools/compliance/testfs",
+}
diff --git a/tools/compliance/testfs/testfs.go b/tools/compliance/testfs/testfs.go
new file mode 100644
index 0000000..2c75c5b
--- /dev/null
+++ b/tools/compliance/testfs/testfs.go
@@ -0,0 +1,129 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package testfs
+
+import (
+	"fmt"
+	"io"
+	"io/fs"
+	"strings"
+	"time"
+)
+
+// TestFS implements a test file system (fs.FS) simulated by a map from filename to []byte content.
+type TestFS map[string][]byte
+
+var _ fs.FS = (*TestFS)(nil)
+var _ fs.StatFS = (*TestFS)(nil)
+
+// Open implements fs.FS.Open() to open a file based on the filename.
+func (tfs *TestFS) Open(name string) (fs.File, error) {
+	if _, ok := (*tfs)[name]; !ok {
+		return nil, fmt.Errorf("unknown file %q", name)
+	}
+	return &TestFile{tfs, name, 0}, nil
+}
+
+// Stat implements fs.StatFS.Stat() to examine a file based on the filename.
+func (tfs *TestFS) Stat(name string) (fs.FileInfo, error) {
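+	// An exact key match is a regular file.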
+	if content, ok := (*tfs)[name]; ok {
+		return &TestFileInfo{name, len(content), 0666}, nil
+	}
+	dirname := name
+	if !strings.HasSuffix(dirname, "/") {
+		dirname = dirname + "/"
+	}
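+	// Treat `name` as a directory if any stored file path lies beneath it.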
+	for f := range *tfs {
+		if strings.HasPrefix(f, dirname) {
+			return &TestFileInfo{name, 8, fs.ModeDir | fs.ModePerm}, nil
+		}
+	}
+	return nil, fmt.Errorf("file not found: %q", name)
+}
+
+// TestFileInfo implements a file info (fs.FileInfo) based on TestFS above.
+type TestFileInfo struct {
+	name string
+	size int
+	mode fs.FileMode
+}
+
+var _ fs.FileInfo = (*TestFileInfo)(nil)
+
+// Name returns the name of the file
+func (fi *TestFileInfo) Name() string {
+	return fi.name
+}
+
+// Size returns the size of the file in bytes.
+func (fi *TestFileInfo) Size() int64 {
+	return int64(fi.size)
+}
+
+// Mode returns the fs.FileMode bits.
+func (fi *TestFileInfo) Mode() fs.FileMode {
+	return fi.mode
+}
+
+// ModTime fakes a modification time.
+func (fi *TestFileInfo) ModTime() time.Time {
+	return time.UnixMicro(0xb0bb)
+}
+
+// IsDir is a synonym for Mode().IsDir()
+func (fi *TestFileInfo) IsDir() bool {
+	return fi.mode.IsDir()
+}
+
+// Sys is unused and returns nil.
+func (fi *TestFileInfo) Sys() any {
+	return nil
+}
+
+// TestFile implements a test file (fs.File) based on TestFS above.
+type TestFile struct {
+	fs   *TestFS
+	name string
+	posn int
+}
+
+var _ fs.File = (*TestFile)(nil)
+
+// Stat returns the file info by delegating to TestFS.Stat().
+func (f *TestFile) Stat() (fs.FileInfo, error) {
+	return f.fs.Stat(f.name)
+}
+
+// Read copies bytes from the TestFS map.
+func (f *TestFile) Read(b []byte) (int, error) {
+	if f.posn < 0 {
+		return 0, fmt.Errorf("file not open: %q", f.name)
+	}
+	if f.posn >= len((*f.fs)[f.name]) {
+		return 0, io.EOF
+	}
+	n := copy(b, (*f.fs)[f.name][f.posn:])
+	f.posn += n
+	return n, nil
+}
+
+// Close marks the TestFile as no longer in use.
+func (f *TestFile) Close() error {
+	if f.posn < 0 {
+		return fmt.Errorf("file already closed: %q", f.name)
+	}
+	f.posn = -1
+	return nil
+}
diff --git a/tools/event_log_tags.bzl b/tools/event_log_tags.bzl
deleted file mode 100644
index 3766da4..0000000
--- a/tools/event_log_tags.bzl
+++ /dev/null
@@ -1,47 +0,0 @@
-"""Event log tags generation rule"""
-
-load("@bazel_skylib//lib:paths.bzl", "paths")
-load("@rules_android//rules:rules.bzl", "android_library")
-
-def _event_log_tags_impl(ctx):
-    out_files = []
-    for logtag_file in ctx.files.srcs:
-        out_filename = paths.replace_extension(logtag_file.basename, ".java")
-        out_file = ctx.actions.declare_file(out_filename)
-        out_files.append(out_file)
-        ctx.actions.run(
-            inputs = [logtag_file],
-            outputs = [out_file],
-            arguments = [
-                "-o",
-                out_file.path,
-                logtag_file.path,
-            ],
-            progress_message = "Generating Java logtag file from %s" % logtag_file.short_path,
-            executable = ctx.executable._logtag_to_java_tool,
-        )
-    return [DefaultInfo(files = depset(out_files))]
-
-_event_log_tags = rule(
-    implementation = _event_log_tags_impl,
-    attrs = {
-        "srcs": attr.label_list(allow_files = [".logtags"], mandatory = True),
-        "_logtag_to_java_tool": attr.label(
-            executable = True,
-            cfg = "exec",
-            allow_files = True,
-            default = Label("//build/make/tools:java-event-log-tags"),
-        ),
-    },
-)
-
-def event_log_tags(name, srcs):
-    _event_log_tags(
-        name = name + "_gen_logtags",
-        srcs = srcs,
-    )
-
-    android_library(
-        name = name,
-        srcs = [name + "_gen_logtags"],
-    )
diff --git a/tools/fileslist_util.py b/tools/fileslist_util.py
index ff40d51..a1b1197 100755
--- a/tools/fileslist_util.py
+++ b/tools/fileslist_util.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2016 The Android Open Source Project
 #
@@ -15,7 +15,9 @@
 # limitations under the License.
 #
 
-import getopt, json, sys
+import argparse
+import json
+import sys
 
 def PrintFileNames(path):
   with open(path) as jf:
@@ -27,42 +29,25 @@
   with open(path) as jf:
     data = json.load(jf)
   for line in data:
-    print "{0:12d}  {1}".format(line["Size"], line["Name"])
+    print(f"{line['Size']:12d}  {line['Name']}")
 
-def PrintUsage(name):
-  print("""
-Usage: %s -[nc] json_files_list
- -n produces list of files only
- -c produces classic installed-files.txt
-""" % (name))
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument("-n", action="store_true",
+                      help="produces list of files only")
+  parser.add_argument("-c", action="store_true",
+                      help="produces classic installed-files.txt")
+  parser.add_argument("json_files_list")
+  args = parser.parse_args()
 
-def main(argv):
-  try:
-    opts, args = getopt.getopt(argv[1:], "nc", "")
-  except getopt.GetoptError, err:
-    print(err)
-    PrintUsage(argv[0])
-    sys.exit(2)
-
-  if len(opts) == 0:
-    print("No conversion option specified")
-    PrintUsage(argv[0])
-    sys.exit(2)
-
-  if len(args) == 0:
-    print("No input file specified")
-    PrintUsage(argv[0])
-    sys.exit(2)
-
-  for o, a in opts:
-    if o == ("-n"):
-      PrintFileNames(args[0])
-      sys.exit()
-    elif o == ("-c"):
-      PrintCanonicalList(args[0])
-      sys.exit()
-    else:
-      assert False, "Unsupported option"
+  if args.n and args.c:
+    sys.exit("Cannot specify both -n and -c")
+  elif args.n:
+    PrintFileNames(args.json_files_list)
+  elif args.c:
+    PrintCanonicalList(args.json_files_list)
+  else:
+    sys.exit("No conversion option specified")
 
 if __name__ == '__main__':
-  main(sys.argv)
+  main()
diff --git a/tools/filter-product-graph.py b/tools/filter-product-graph.py
deleted file mode 100755
index b3a5b42..0000000
--- a/tools/filter-product-graph.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python
-# vim: ts=2 sw=2 nocindent
-
-import re
-import sys
-
-def choose_regex(regs, line):
-  for func,reg in regs:
-    m = reg.match(line)
-    if m:
-      return (func,m)
-  return (None,None)
-
-def gather(included, deps):
-  result = set()
-  for inc in included:
-    result.add(inc)
-    for d in deps:
-      if inc == d[1]:
-        result.add(d[0])
-  return result
-
-def main():
-  deps = []
-  infos = []
-  def dependency(m):
-    deps.append((m.group(1), m.group(2)))
-  def info(m):
-    infos.append((m.group(1), m.group(2)))
-
-  REGS = [
-      (dependency, re.compile(r'"(.*)"\s*->\s*"(.*)"')), 
-      (info, re.compile(r'"(.*)"(\s*\[.*\])')), 
-    ]
-
-  lines = sys.stdin.readlines()
-  lines = [line.strip() for line in lines]
-
-  for line in lines:
-    func,m = choose_regex(REGS, line)
-    if func:
-      func(m)
-
-  # filter
-  sys.stderr.write("argv: " + str(sys.argv) + "\n")
-  if not (len(sys.argv) == 2 and sys.argv[1] == "--all"):
-    targets = sys.argv[1:]
-
-    included = set(targets)
-    prevLen = -1
-    while prevLen != len(included):
-      prevLen = len(included)
-      included = gather(included, deps)
-
-    deps = [dep for dep in deps if dep[1] in included]
-    infos = [info for info in infos if info[0] in included]
-
-  print "digraph {"
-  print "graph [ ratio=.5 ];"
-  for dep in deps:
-    print '"%s" -> "%s"' % dep
-  for info in infos:
-    print '"%s"%s' % info
-  print "}"
-
-
-if __name__ == "__main__":
-  main()
diff --git a/tools/findleaves.py b/tools/findleaves.py
index 97302e9..86f3f3a 100755
--- a/tools/findleaves.py
+++ b/tools/findleaves.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2009 The Android Open Source Project
 #
@@ -121,7 +121,7 @@
   results = list(set(perform_find(mindepth, prune, dirlist, filenames)))
   results.sort()
   for r in results:
-    print r
+    print(r)
 
 if __name__ == "__main__":
   main(sys.argv)
diff --git a/tools/fs_config/Android.bp b/tools/fs_config/Android.bp
index 8891a0a..55fdca4 100644
--- a/tools/fs_config/Android.bp
+++ b/tools/fs_config/Android.bp
@@ -40,14 +40,28 @@
     cflags: ["-Werror"],
 }
 
+python_binary_host {
+    name: "fs_config_generator",
+    srcs: ["fs_config_generator.py"],
+}
+
+python_test_host {
+    name: "test_fs_config_generator",
+    main: "test_fs_config_generator.py",
+    srcs: [
+        "test_fs_config_generator.py",
+        "fs_config_generator.py",
+    ],
+}
+
 target_fs_config_gen_filegroup {
     name: "target_fs_config_gen",
 }
 
 genrule {
     name: "oemaids_header_gen",
-    tool_files: ["fs_config_generator.py"],
-    cmd: "$(location fs_config_generator.py) oemaid --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+    tools: ["fs_config_generator"],
+    cmd: "$(location fs_config_generator) oemaid --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
     srcs: [
         ":target_fs_config_gen",
         ":android_filesystem_config_header",
@@ -67,8 +81,8 @@
 // TARGET_FS_CONFIG_GEN files.
 genrule {
     name: "passwd_gen_system",
-    tool_files: ["fs_config_generator.py"],
-    cmd: "$(location fs_config_generator.py) passwd --partition=system --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+    tools: ["fs_config_generator"],
+    cmd: "$(location fs_config_generator) passwd --partition=system --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
     srcs: [
         ":target_fs_config_gen",
         ":android_filesystem_config_header",
@@ -84,8 +98,8 @@
 
 genrule {
     name: "passwd_gen_vendor",
-    tool_files: ["fs_config_generator.py"],
-    cmd: "$(location fs_config_generator.py) passwd --partition=vendor --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+    tools: ["fs_config_generator"],
+    cmd: "$(location fs_config_generator) passwd --partition=vendor --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
     srcs: [
         ":target_fs_config_gen",
         ":android_filesystem_config_header",
@@ -102,8 +116,8 @@
 
 genrule {
     name: "passwd_gen_odm",
-    tool_files: ["fs_config_generator.py"],
-    cmd: "$(location fs_config_generator.py) passwd --partition=odm --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+    tools: ["fs_config_generator"],
+    cmd: "$(location fs_config_generator) passwd --partition=odm --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
     srcs: [
         ":target_fs_config_gen",
         ":android_filesystem_config_header",
@@ -120,8 +134,8 @@
 
 genrule {
     name: "passwd_gen_product",
-    tool_files: ["fs_config_generator.py"],
-    cmd: "$(location fs_config_generator.py) passwd --partition=product --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+    tools: ["fs_config_generator"],
+    cmd: "$(location fs_config_generator) passwd --partition=product --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
     srcs: [
         ":target_fs_config_gen",
         ":android_filesystem_config_header",
@@ -138,8 +152,8 @@
 
 genrule {
     name: "passwd_gen_system_ext",
-    tool_files: ["fs_config_generator.py"],
-    cmd: "$(location fs_config_generator.py) passwd --partition=system_ext --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+    tools: ["fs_config_generator"],
+    cmd: "$(location fs_config_generator) passwd --partition=system_ext --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
     srcs: [
         ":target_fs_config_gen",
         ":android_filesystem_config_header",
@@ -159,8 +173,8 @@
 // TARGET_FS_CONFIG_GEN files.
 genrule {
     name: "group_gen_system",
-    tool_files: ["fs_config_generator.py"],
-    cmd: "$(location fs_config_generator.py) group --partition=system --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+    tools: ["fs_config_generator"],
+    cmd: "$(location fs_config_generator) group --partition=system --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
     srcs: [
         ":target_fs_config_gen",
         ":android_filesystem_config_header",
@@ -176,8 +190,8 @@
 
 genrule {
     name: "group_gen_vendor",
-    tool_files: ["fs_config_generator.py"],
-    cmd: "$(location fs_config_generator.py) group --partition=vendor --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+    tools: ["fs_config_generator"],
+    cmd: "$(location fs_config_generator) group --partition=vendor --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
     srcs: [
         ":target_fs_config_gen",
         ":android_filesystem_config_header",
@@ -194,8 +208,8 @@
 
 genrule {
     name: "group_gen_odm",
-    tool_files: ["fs_config_generator.py"],
-    cmd: "$(location fs_config_generator.py) group --partition=odm --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+    tools: ["fs_config_generator"],
+    cmd: "$(location fs_config_generator) group --partition=odm --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
     srcs: [
         ":target_fs_config_gen",
         ":android_filesystem_config_header",
@@ -212,8 +226,8 @@
 
 genrule {
     name: "group_gen_product",
-    tool_files: ["fs_config_generator.py"],
-    cmd: "$(location fs_config_generator.py) group --partition=product --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+    tools: ["fs_config_generator"],
+    cmd: "$(location fs_config_generator) group --partition=product --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
     srcs: [
         ":target_fs_config_gen",
         ":android_filesystem_config_header",
@@ -230,8 +244,8 @@
 
 genrule {
     name: "group_gen_system_ext",
-    tool_files: ["fs_config_generator.py"],
-    cmd: "$(location fs_config_generator.py) group --partition=system_ext --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
+    tools: ["fs_config_generator"],
+    cmd: "$(location fs_config_generator) group --partition=system_ext --aid-header=$(location :android_filesystem_config_header) $(locations :target_fs_config_gen) >$(out)",
     srcs: [
         ":target_fs_config_gen",
         ":android_filesystem_config_header",
diff --git a/tools/fs_config/README.md b/tools/fs_config/README.md
index bad5e10..62d6d1e 100644
--- a/tools/fs_config/README.md
+++ b/tools/fs_config/README.md
@@ -69,13 +69,13 @@
 
 From within the `fs_config` directory, unit tests can be executed like so:
 
-    $ python -m unittest test_fs_config_generator.Tests
-    .............
+    $ python test_fs_config_generator.py
+    ................
     ----------------------------------------------------------------------
-    Ran 13 tests in 0.004s
-
+    Ran 16 tests in 0.004s
     OK
 
+
 One could also use nose if they would like:
 
     $ nose2
diff --git a/tools/fs_config/fs_config_generator.py b/tools/fs_config/fs_config_generator.py
index 098fde6..44480b8 100755
--- a/tools/fs_config/fs_config_generator.py
+++ b/tools/fs_config/fs_config_generator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2
+#!/usr/bin/env python3
 """Generates config files for Android file system properties.
 
 This script is used for generating configuration files for configuring
@@ -11,7 +11,7 @@
 """
 
 import argparse
-import ConfigParser
+import configparser
 import ctypes
 import re
 import sys
@@ -179,6 +179,10 @@
             and self.normalized_value == other.normalized_value \
             and self.login_shell == other.login_shell
 
+    def __repr__(self):
+        return "AID { identifier = %s, value = %s, normalized_value = %s, login_shell = %s }" % (
+            self.identifier, self.value, self.normalized_value, self.login_shell)
+
     @staticmethod
     def is_friendly(name):
         """Determines if an AID is a freindly name or C define.
@@ -312,7 +316,7 @@
     ]
     _AID_DEFINE = re.compile(r'\s*#define\s+%s.*' % AID.PREFIX)
     _RESERVED_RANGE = re.compile(
-        r'#define AID_(.+)_RESERVED_\d*_*(START|END)\s+(\d+)')
+        r'#define AID_(.+)_RESERVED_(?:(\d+)_)?(START|END)\s+(\d+)')
 
     # AID lines cannot end with _START or _END, i.e. AID_FOO is OK
     # but AID_FOO_START is skipped. Note that AID_FOOSTART is NOT skipped.
@@ -345,6 +349,7 @@
             aid_file (file): The open AID header file to parse.
         """
 
+        ranges_by_name = {}
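+        # partition -> range name -> [start, end]; filled in while scanning lines.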
         for lineno, line in enumerate(aid_file):
 
             def error_message(msg):
@@ -355,20 +360,24 @@
 
             range_match = self._RESERVED_RANGE.match(line)
             if range_match:
-                partition = range_match.group(1).lower()
-                value = int(range_match.group(3), 0)
+                partition, name, start, value = range_match.groups()
+                partition = partition.lower()
+                if name is None:
+                    name = "unnamed"
+                start = start == "START"
+                value = int(value, 0)
 
                 if partition == 'oem':
                     partition = 'vendor'
 
-                if partition in self._ranges:
-                    if isinstance(self._ranges[partition][-1], int):
-                        self._ranges[partition][-1] = (
-                            self._ranges[partition][-1], value)
-                    else:
-                        self._ranges[partition].append(value)
-                else:
-                    self._ranges[partition] = [value]
+                if partition not in ranges_by_name:
+                    ranges_by_name[partition] = {}
+                if name not in ranges_by_name[partition]:
+                    ranges_by_name[partition][name] = [None, None]
+                if ranges_by_name[partition][name][0 if start else 1] is not None:
+                    sys.exit(error_message("{} of range {} of partition {} was already defined".format(
+                        "Start" if start else "End", name, partition)))
+                ranges_by_name[partition][name][0 if start else 1] = value
 
             if AIDHeaderParser._AID_DEFINE.match(line):
                 chunks = line.split()
@@ -390,6 +399,21 @@
                         error_message('{} for "{}"'.format(
                             exception, identifier)))
 
+        for partition in ranges_by_name:
+            for name in ranges_by_name[partition]:
+                start = ranges_by_name[partition][name][0]
+                end = ranges_by_name[partition][name][1]
+                if start is None:
+                    sys.exit("Range '%s' for partition '%s' had undefined start" % (name, partition))
+                if end is None:
+                    sys.exit("Range '%s' for partition '%s' had undefined end" % (name, partition))
+                if start > end:
+                    sys.exit("Range '%s' for partition '%s' had start after end. Start: %d, end: %d" % (name, partition, start, end))
+
+                if partition not in self._ranges:
+                    self._ranges[partition] = []
+                self._ranges[partition].append((start, end))
+
     def _handle_aid(self, identifier, value):
         """Handle an AID C #define.
 
@@ -439,7 +463,7 @@
         # No core AIDs should be within any oem range.
         for aid in self._aid_value_to_name:
             for ranges in self._ranges.values():
-                if Utils.in_any_range(aid, ranges):
+                if Utils.in_any_range(int(aid, 0), ranges):
                     name = self._aid_value_to_name[aid]
                     raise ValueError(
                         'AID "%s" value: %u within reserved OEM Range: "%s"' %
@@ -545,7 +569,7 @@
         # override previous
         # sections.
 
-        config = ConfigParser.ConfigParser()
+        config = configparser.ConfigParser()
         config.read(file_name)
 
         for section in config.sections():
@@ -589,7 +613,7 @@
 
         ranges = None
 
-        partitions = self._ranges.keys()
+        partitions = list(self._ranges.keys())
         partitions.sort(key=len, reverse=True)
         for partition in partitions:
             if aid.friendly.startswith(partition):
@@ -1049,7 +1073,7 @@
         user_binary = bytearray(ctypes.c_uint16(int(user, 0)))
         group_binary = bytearray(ctypes.c_uint16(int(group, 0)))
         caps_binary = bytearray(ctypes.c_uint64(caps_value))
-        path_binary = ctypes.create_string_buffer(path,
+        path_binary = ctypes.create_string_buffer(path.encode(),
                                                   path_length_aligned_64).raw
 
         out_file.write(length_binary)
@@ -1145,21 +1169,21 @@
         hdr = AIDHeaderParser(args['hdrfile'])
         max_name_length = max(len(aid.friendly) + 1 for aid in hdr.aids)
 
-        print AIDArrayGen._GENERATED
-        print
-        print AIDArrayGen._INCLUDE
-        print
-        print AIDArrayGen._STRUCT_FS_CONFIG % max_name_length
-        print
-        print AIDArrayGen._OPEN_ID_ARRAY
+        print(AIDArrayGen._GENERATED)
+        print()
+        print(AIDArrayGen._INCLUDE)
+        print()
+        print(AIDArrayGen._STRUCT_FS_CONFIG % max_name_length)
+        print()
+        print(AIDArrayGen._OPEN_ID_ARRAY)
 
         for aid in hdr.aids:
-            print AIDArrayGen._ID_ENTRY % (aid.friendly, aid.identifier)
+            print(AIDArrayGen._ID_ENTRY % (aid.friendly, aid.identifier))
 
-        print AIDArrayGen._CLOSE_FILE_STRUCT
-        print
-        print AIDArrayGen._COUNT
-        print
+        print(AIDArrayGen._CLOSE_FILE_STRUCT)
+        print()
+        print(AIDArrayGen._COUNT)
+        print()
 
 
 @generator('oemaid')
@@ -1201,15 +1225,15 @@
 
         parser = FSConfigFileParser(args['fsconfig'], hdr_parser.ranges)
 
-        print OEMAidGen._GENERATED
+        print(OEMAidGen._GENERATED)
 
-        print OEMAidGen._FILE_IFNDEF_DEFINE
+        print(OEMAidGen._FILE_IFNDEF_DEFINE)
 
         for aid in parser.aids:
             self._print_aid(aid)
-            print
+            print()
 
-        print OEMAidGen._FILE_ENDIF
+        print(OEMAidGen._FILE_ENDIF)
 
     def _print_aid(self, aid):
         """Prints a valid #define AID identifier to stdout.
@@ -1221,10 +1245,10 @@
         # print the source file location of the AID
         found_file = aid.found
         if found_file != self._old_file:
-            print OEMAidGen._FILE_COMMENT % found_file
+            print(OEMAidGen._FILE_COMMENT % found_file)
             self._old_file = found_file
 
-        print OEMAidGen._GENERIC_DEFINE % (aid.identifier, aid.value)
+        print(OEMAidGen._GENERIC_DEFINE % (aid.identifier, aid.value))
 
 
 @generator('passwd')
@@ -1268,7 +1292,7 @@
             return
 
         aids_by_partition = {}
-        partitions = hdr_parser.ranges.keys()
+        partitions = list(hdr_parser.ranges.keys())
         partitions.sort(key=len, reverse=True)
 
         for aid in aids:
@@ -1307,7 +1331,7 @@
         except ValueError as exception:
             sys.exit(exception)
 
-        print "%s::%s:%s::/:%s" % (logon, uid, uid, aid.login_shell)
+        print("%s::%s:%s::/:%s" % (logon, uid, uid, aid.login_shell))
 
 
 @generator('group')
@@ -1332,7 +1356,7 @@
         except ValueError as exception:
             sys.exit(exception)
 
-        print "%s::%s:" % (logon, uid)
+        print("%s::%s:" % (logon, uid))
 
 
 @generator('print')
@@ -1355,7 +1379,7 @@
         aids.sort(key=lambda item: int(item.normalized_value))
 
         for aid in aids:
-            print '%s %s' % (aid.identifier, aid.normalized_value)
+            print('%s %s' % (aid.identifier, aid.normalized_value))
 
 
 def main():
@@ -1369,7 +1393,7 @@
     gens = generator.get()
 
     # for each gen, instantiate and add them as an option
-    for name, gen in gens.iteritems():
+    for name, gen in gens.items():
 
         generator_option_parser = subparser.add_parser(name, help=gen.__doc__)
         generator_option_parser.set_defaults(which=name)
diff --git a/tools/fs_config/test_fs_config_generator.py b/tools/fs_config/test_fs_config_generator.py
index b7f173e..cbf46a1 100755
--- a/tools/fs_config/test_fs_config_generator.py
+++ b/tools/fs_config/test_fs_config_generator.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 """Unit test suite for the fs_config_genertor.py tool."""
 
 import tempfile
@@ -64,7 +64,7 @@
     def test_aid_header_parser_good(self):
         """Test AID Header Parser good input file"""
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 #define AID_FOO 1000
@@ -78,11 +78,11 @@
             temp_file.flush()
 
             parser = AIDHeaderParser(temp_file.name)
-            oem_ranges = parser.oem_ranges
+            ranges = parser.ranges
             aids = parser.aids
 
-            self.assertTrue((2900, 2999) in oem_ranges)
-            self.assertFalse((5000, 6000) in oem_ranges)
+            self.assertTrue((2900, 2999) in ranges["vendor"])
+            self.assertFalse((5000, 6000) in ranges["vendor"])
 
             for aid in aids:
                 self.assertTrue(aid.normalized_value in ['1000', '1001'])
@@ -91,7 +91,7 @@
     def test_aid_header_parser_good_unordered(self):
         """Test AID Header Parser good unordered input file"""
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 #define AID_FOO 1000
@@ -105,11 +105,11 @@
             temp_file.flush()
 
             parser = AIDHeaderParser(temp_file.name)
-            oem_ranges = parser.oem_ranges
+            ranges = parser.ranges
             aids = parser.aids
 
-            self.assertTrue((2900, 2999) in oem_ranges)
-            self.assertFalse((5000, 6000) in oem_ranges)
+            self.assertTrue((2900, 2999) in ranges["vendor"])
+            self.assertFalse((5000, 6000) in ranges["vendor"])
 
             for aid in aids:
                 self.assertTrue(aid.normalized_value in ['1000', '1001'])
@@ -118,7 +118,7 @@
     def test_aid_header_parser_bad_aid(self):
         """Test AID Header Parser bad aid input file"""
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 #define AID_FOO "bad"
@@ -131,7 +131,7 @@
     def test_aid_header_parser_bad_oem_range(self):
         """Test AID Header Parser bad oem range input file"""
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 #define AID_OEM_RESERVED_START 2900
@@ -145,7 +145,7 @@
     def test_aid_header_parser_bad_oem_range_no_end(self):
         """Test AID Header Parser bad oem range (no end) input file"""
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 #define AID_OEM_RESERVED_START 2900
@@ -158,7 +158,7 @@
     def test_aid_header_parser_bad_oem_range_no_start(self):
         """Test AID Header Parser bad oem range (no start) input file"""
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 #define AID_OEM_RESERVED_END 2900
@@ -168,10 +168,26 @@
             with self.assertRaises(SystemExit):
                 AIDHeaderParser(temp_file.name)
 
+    def test_aid_header_parser_bad_oem_range_duplicated(self):
+        """Test AID Header Parser bad oem range (no start) input file"""
+
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
+            temp_file.write(
+                textwrap.dedent("""
+                #define AID_OEM_RESERVED_START 2000
+                #define AID_OEM_RESERVED_END 2900
+                #define AID_OEM_RESERVED_START 3000
+                #define AID_OEM_RESERVED_END 3900
+            """))
+            temp_file.flush()
+
+            with self.assertRaises(SystemExit):
+                AIDHeaderParser(temp_file.name)
+
     def test_aid_header_parser_bad_oem_range_mismatch_start_end(self):
         """Test AID Header Parser bad oem range mismatched input file"""
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 #define AID_OEM_RESERVED_START 2900
@@ -185,7 +201,7 @@
     def test_aid_header_parser_bad_duplicate_ranges(self):
         """Test AID Header Parser exits cleanly on duplicate AIDs"""
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 #define AID_FOO 100
@@ -206,7 +222,7 @@
           - https://android-review.googlesource.com/#/c/313169
         """
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 #define AID_APP              10000 /* TODO: switch users over to AID_APP_START */
@@ -241,7 +257,7 @@
     def test_fs_config_file_parser_good(self):
         """Test FSConfig Parser good input file"""
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 [/system/bin/file]
@@ -262,7 +278,7 @@
             """))
             temp_file.flush()
 
-            parser = FSConfigFileParser([temp_file.name], [(5000, 5999)])
+            parser = FSConfigFileParser([temp_file.name], {"oem1": [(5000, 5999)]})
             files = parser.files
             dirs = parser.dirs
             aids = parser.aids
@@ -284,12 +300,12 @@
                              FSConfig('0777', 'AID_FOO', 'AID_SYSTEM', '0',
                                       '/vendor/path/dir/', temp_file.name))
 
-            self.assertEqual(aid, AID('AID_OEM1', '0x1389', temp_file.name, '/vendor/bin/sh'))
+            self.assertEqual(aid, AID('AID_OEM1', '0x1389', temp_file.name, '/bin/sh'))
 
     def test_fs_config_file_parser_bad(self):
         """Test FSConfig Parser bad input file"""
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 [/system/bin/file]
@@ -298,12 +314,12 @@
             temp_file.flush()
 
             with self.assertRaises(SystemExit):
-                FSConfigFileParser([temp_file.name], [(5000, 5999)])
+                FSConfigFileParser([temp_file.name], {})
 
     def test_fs_config_file_parser_bad_aid_range(self):
         """Test FSConfig Parser bad aid range value input file"""
 
-        with tempfile.NamedTemporaryFile() as temp_file:
+        with tempfile.NamedTemporaryFile(mode='w') as temp_file:
             temp_file.write(
                 textwrap.dedent("""
                 [AID_OEM1]
@@ -312,4 +328,7 @@
             temp_file.flush()
 
             with self.assertRaises(SystemExit):
-                FSConfigFileParser([temp_file.name], [(5000, 5999)])
+                FSConfigFileParser([temp_file.name], {"oem1": [(5000, 5999)]})
+
+if __name__ == "__main__":
+    unittest.main()
diff --git a/tools/java-event-log-tags.py b/tools/java-event-log-tags.py
index 4bd6d2b..bbd65fa 100755
--- a/tools/java-event-log-tags.py
+++ b/tools/java-event-log-tags.py
@@ -100,7 +100,8 @@
              " * Source file: %s\n"
              " */\n\n" % (fn,))
 
-buffer.write("package %s;\n\n" % (tagfile.options["java_package"][0],))
+# .rstrip(";") to avoid an empty top-level statement errorprone error
+buffer.write("package %s;\n\n" % (tagfile.options["java_package"][0].rstrip(";"),))
 
 basename, _ = os.path.splitext(os.path.basename(fn))
 
diff --git a/tools/java-layers.py b/tools/java-layers.py
deleted file mode 100755
index b3aec2b..0000000
--- a/tools/java-layers.py
+++ /dev/null
@@ -1,257 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import re
-import sys
-
-def fail_with_usage():
-  sys.stderr.write("usage: java-layers.py DEPENDENCY_FILE SOURCE_DIRECTORIES...\n")
-  sys.stderr.write("\n")
-  sys.stderr.write("Enforces layering between java packages.  Scans\n")
-  sys.stderr.write("DIRECTORY and prints errors when the packages violate\n")
-  sys.stderr.write("the rules defined in the DEPENDENCY_FILE.\n")
-  sys.stderr.write("\n")
-  sys.stderr.write("Prints a warning when an unknown package is encountered\n")
-  sys.stderr.write("on the assumption that it should fit somewhere into the\n")
-  sys.stderr.write("layering.\n")
-  sys.stderr.write("\n")
-  sys.stderr.write("DEPENDENCY_FILE format\n")
-  sys.stderr.write("  - # starts comment\n")
-  sys.stderr.write("  - Lines consisting of two java package names:  The\n")
-  sys.stderr.write("    first package listed must not contain any references\n")
-  sys.stderr.write("    to any classes present in the second package, or any\n")
-  sys.stderr.write("    of its dependencies.\n")
-  sys.stderr.write("  - Lines consisting of one java package name:  The\n")
-  sys.stderr.write("    packge is assumed to be a high level package and\n")
-  sys.stderr.write("    nothing may depend on it.\n")
-  sys.stderr.write("  - Lines consisting of a dash (+) followed by one java\n")
-  sys.stderr.write("    package name: The package is considered a low level\n")
-  sys.stderr.write("    package and may not import any of the other packages\n")
-  sys.stderr.write("    listed in the dependency file.\n")
-  sys.stderr.write("  - Lines consisting of a plus (-) followed by one java\n")
-  sys.stderr.write("    package name: The package is considered \'legacy\'\n")
-  sys.stderr.write("    and excluded from errors.\n")
-  sys.stderr.write("\n")
-  sys.exit(1)
-
-class Dependency:
-  def __init__(self, filename, lineno, lower, top, lowlevel, legacy):
-    self.filename = filename
-    self.lineno = lineno
-    self.lower = lower
-    self.top = top
-    self.lowlevel = lowlevel
-    self.legacy = legacy
-    self.uppers = []
-    self.transitive = set()
-
-  def matches(self, imp):
-    for d in self.transitive:
-      if imp.startswith(d):
-        return True
-    return False
-
-class Dependencies:
-  def __init__(self, deps):
-    def recurse(obj, dep, visited):
-      global err
-      if dep in visited:
-        sys.stderr.write("%s:%d: Circular dependency found:\n"
-            % (dep.filename, dep.lineno))
-        for v in visited:
-          sys.stderr.write("%s:%d:    Dependency: %s\n"
-              % (v.filename, v.lineno, v.lower))
-        err = True
-        return
-      visited.append(dep)
-      for upper in dep.uppers:
-        obj.transitive.add(upper)
-        if upper in deps:
-          recurse(obj, deps[upper], visited)
-    self.deps = deps
-    self.parts = [(dep.lower.split('.'),dep) for dep in deps.itervalues()]
-    # transitive closure of dependencies
-    for dep in deps.itervalues():
-      recurse(dep, dep, [])
-    # disallow everything from the low level components
-    for dep in deps.itervalues():
-      if dep.lowlevel:
-        for d in deps.itervalues():
-          if dep != d and not d.legacy:
-            dep.transitive.add(d.lower)
-    # disallow the 'top' components everywhere but in their own package
-    for dep in deps.itervalues():
-      if dep.top and not dep.legacy:
-        for d in deps.itervalues():
-          if dep != d and not d.legacy:
-            d.transitive.add(dep.lower)
-    for dep in deps.itervalues():
-      dep.transitive = set([x+"." for x in dep.transitive])
-    if False:
-      for dep in deps.itervalues():
-        print "-->", dep.lower, "-->", dep.transitive
-
-  # Lookup the dep object for the given package.  If pkg is a subpackage
-  # of one with a rule, that one will be returned.  If no matches are found,
-  # None is returned.
-  def lookup(self, pkg):
-    # Returns the number of parts that match
-    def compare_parts(parts, pkg):
-      if len(parts) > len(pkg):
-        return 0
-      n = 0
-      for i in range(0, len(parts)):
-        if parts[i] != pkg[i]:
-          return 0
-        n = n + 1
-      return n
-    pkg = pkg.split(".")
-    matched = 0
-    result = None
-    for (parts,dep) in self.parts:
-      x = compare_parts(parts, pkg)
-      if x > matched:
-        matched = x
-        result = dep
-    return result
-
-def parse_dependency_file(filename):
-  global err
-  f = file(filename)
-  lines = f.readlines()
-  f.close()
-  def lineno(s, i):
-    i[0] = i[0] + 1
-    return (i[0],s)
-  n = [0]
-  lines = [lineno(x,n) for x in lines]
-  lines = [(n,s.split("#")[0].strip()) for (n,s) in lines]
-  lines = [(n,s) for (n,s) in lines if len(s) > 0]
-  lines = [(n,s.split()) for (n,s) in lines]
-  deps = {}
-  for n,words in lines:
-    if len(words) == 1:
-      lower = words[0]
-      top = True
-      legacy = False
-      lowlevel = False
-      if lower[0] == '+':
-        lower = lower[1:]
-        top = False
-        lowlevel = True
-      elif lower[0] == '-':
-        lower = lower[1:]
-        legacy = True
-      if lower in deps:
-        sys.stderr.write(("%s:%d: Package '%s' already defined on"
-            + " line %d.\n") % (filename, n, lower, deps[lower].lineno))
-        err = True
-      else:
-        deps[lower] = Dependency(filename, n, lower, top, lowlevel, legacy)
-    elif len(words) == 2:
-      lower = words[0]
-      upper = words[1]
-      if lower in deps:
-        dep = deps[lower]
-        if dep.top:
-          sys.stderr.write(("%s:%d: Can't add dependency to top level package "
-            + "'%s'\n") % (filename, n, lower))
-          err = True
-      else:
-        dep = Dependency(filename, n, lower, False, False, False)
-        deps[lower] = dep
-      dep.uppers.append(upper)
-    else:
-      sys.stderr.write("%s:%d: Too many words on line starting at \'%s\'\n" % (
-          filename, n, words[2]))
-      err = True
-  return Dependencies(deps)
-
-def find_java_files(srcs):
-  result = []
-  for d in srcs:
-    if d[0] == '@':
-      f = file(d[1:])
-      result.extend([fn for fn in [s.strip() for s in f.readlines()]
-          if len(fn) != 0])
-      f.close()
-    else:
-      for root, dirs, files in os.walk(d):
-        result.extend([os.sep.join((root,f)) for f in files
-            if f.lower().endswith(".java")])
-  return result
-
-COMMENTS = re.compile("//.*?\n|/\*.*?\*/", re.S)
-PACKAGE = re.compile("package\s+(.*)")
-IMPORT = re.compile("import\s+(.*)")
-
-def examine_java_file(deps, filename):
-  global err
-  # Yes, this is a crappy java parser.  Write a better one if you want to.
-  f = file(filename)
-  text = f.read()
-  f.close()
-  text = COMMENTS.sub("", text)
-  index = text.find("{")
-  if index < 0:
-    sys.stderr.write(("%s: Error: Unable to parse java. Can't find class "
-        + "declaration.\n") % filename)
-    err = True
-    return
-  text = text[0:index]
-  statements = [s.strip() for s in text.split(";")]
-  # First comes the package declaration.  Then iterate while we see import
-  # statements.  Anything else is either bad syntax that we don't care about
-  # because the compiler will fail, or the beginning of the class declaration.
-  m = PACKAGE.match(statements[0])
-  if not m:
-    sys.stderr.write(("%s: Error: Unable to parse java. Missing package "
-        + "statement.\n") % filename)
-    err = True
-    return
-  pkg = m.group(1)
-  imports = []
-  for statement in statements[1:]:
-    m = IMPORT.match(statement)
-    if not m:
-      break
-    imports.append(m.group(1))
-  # Do the checking
-  if False:
-    print filename
-    print "'%s' --> %s" % (pkg, imports)
-  dep = deps.lookup(pkg)
-  if not dep:
-    sys.stderr.write(("%s: Error: Package does not appear in dependency file: "
-      + "%s\n") % (filename, pkg))
-    err = True
-    return
-  for imp in imports:
-    if dep.matches(imp):
-      sys.stderr.write("%s: Illegal import in package '%s' of '%s'\n"
-          % (filename, pkg, imp))
-      err = True
-
-err = False
-
-def main(argv):
-  if len(argv) < 3:
-    fail_with_usage()
-  deps = parse_dependency_file(argv[1])
-
-  if err:
-    sys.exit(1)
-
-  java = find_java_files(argv[2:])
-  for filename in java:
-    examine_java_file(deps, filename)
-
-  if err:
-    sys.stderr.write("%s: Using this file as dependency file.\n" % argv[1])
-    sys.exit(1)
-
-  sys.exit(0)
-
-if __name__ == "__main__":
-  main(sys.argv)
-
diff --git a/tools/mk2bp_catalog.py b/tools/mk2bp_catalog.py
index c2afb9b..3fc6236 100755
--- a/tools/mk2bp_catalog.py
+++ b/tools/mk2bp_catalog.py
@@ -308,19 +308,31 @@
     print("""<th class="Count Warning">%s</th>""" % analyzer.title)
   print("      </tr>")
 
+# get all modules in $(PRODUCT_PACKAGES) and their transitive deps
+def get_module_product_packages_plus_deps(initial_modules, result, soong_data):
+  for module in initial_modules:
+    if module in result:
+      continue
+    result.add(module)
+    if module in soong_data.deps:
+      get_module_product_packages_plus_deps(soong_data.deps[module], result, soong_data)
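+# e.g. with soong_data.deps == {"A": ["B"], "B": ["C"]} (hypothetical), calling
+# this with initial_modules=["A"] leaves result == {"A", "B", "C"}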
+
 def main():
   parser = argparse.ArgumentParser(description="Info about remaining Android.mk files.")
   parser.add_argument("--device", type=str, required=True,
                       help="TARGET_DEVICE")
+  parser.add_argument("--product-packages", type=argparse.FileType('r'),
+                      default=None,
+                      help="PRODUCT_PACKAGES")
   parser.add_argument("--title", type=str,
                       help="page title")
   parser.add_argument("--codesearch", type=str,
                       default="https://cs.android.com/android/platform/superproject/+/master:",
                       help="page title")
-  parser.add_argument("--out_dir", type=str,
+  parser.add_argument("--out-dir", type=str,
                       default=None,
                       help="Equivalent of $OUT_DIR, which will also be checked if"
-                        + " --out_dir is unset. If neither is set, default is"
+                        + " --out-dir is unset. If neither is set, default is"
                         + " 'out'.")
   parser.add_argument("--mode", type=str,
                       default="html",
@@ -354,16 +366,25 @@
       continue
     all_makefiles[filename] = Makefile(filename)
 
+  # Get all the modules in $(PRODUCT_PACKAGES) and their corresponding deps
+  product_package_modules_plus_deps = set()
+  if args.product_packages:
+    product_package_top_modules = args.product_packages.read().strip().split('\n')
+    get_module_product_packages_plus_deps(product_package_top_modules, product_package_modules_plus_deps, soong)
+
   if args.mode == "html":
-    HtmlProcessor(args=args, soong=soong, all_makefiles=all_makefiles).execute()
+    HtmlProcessor(args=args, soong=soong, all_makefiles=all_makefiles,
+        product_packages_modules=product_package_modules_plus_deps).execute()
   elif args.mode == "csv":
-    CsvProcessor(args=args, soong=soong, all_makefiles=all_makefiles).execute()
+    CsvProcessor(args=args, soong=soong, all_makefiles=all_makefiles,
+        product_packages_modules=product_package_modules_plus_deps).execute()
 
 class HtmlProcessor(object):
-  def __init__(self, args, soong, all_makefiles):
+  def __init__(self, args, soong, all_makefiles, product_packages_modules):
     self.args = args
     self.soong = soong
     self.all_makefiles = all_makefiles
+    self.product_packages_modules = product_packages_modules
     self.annotations = Annotations()
 
   def execute(self):
@@ -376,6 +397,8 @@
     modules_by_partition = dict()
     partitions = set()
     for installed, module in self.soong.installed.items():
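+      # an empty set means --product-packages was not given; keep every module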
+      if len(self.product_packages_modules) > 0 and module not in self.product_packages_modules:
+        continue
       partition = get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT, installed)
       modules_by_partition.setdefault(partition, []).append(module)
       partitions.add(partition)
@@ -985,10 +1008,11 @@
       return "";
 
 class CsvProcessor(object):
-  def __init__(self, args, soong, all_makefiles):
+  def __init__(self, args, soong, all_makefiles, product_packages_modules):
     self.args = args
     self.soong = soong
     self.all_makefiles = all_makefiles
+    self.product_packages_modules = product_packages_modules
 
   def execute(self):
     csvout = csv.writer(sys.stdout)
@@ -1004,6 +1028,8 @@
     for filename in sorted(self.all_makefiles.keys()):
       makefile = self.all_makefiles[filename]
       for module in self.soong.reverse_makefiles[filename]:
+        if len(self.product_packages_modules) > 0 and module not in self.product_packages_modules:
+          continue
         row = [filename, module]
         # Partitions
         row.append(";".join(sorted(set([get_partition_from_installed(HOST_OUT_ROOT, PRODUCT_OUT,
diff --git a/tools/mk2bp_partition.py b/tools/mk2bp_partition.py
new file mode 100644
index 0000000..30c1135
--- /dev/null
+++ b/tools/mk2bp_partition.py
@@ -0,0 +1,100 @@
+#!/usr/bin/env python3
+
+"""
+Generates the complete list of the remaining Android.mk files in each partition
+for all lunch targets.
+
+How to run:
+python3 $(path-to-file)/mk2bp_partition.py
+"""
+
+from pathlib import Path
+
+import csv
+import datetime
+import os
+import shutil
+import subprocess
+import sys
+import time
+
+def get_top():
+  path = '.'
+  while not os.path.isfile(os.path.join(path, 'build/soong/soong_ui.bash')):
+    if os.path.abspath(path) == '/':
+      sys.exit('Could not find android source tree root.')
+    path = os.path.join(path, '..')
+  return os.path.abspath(path)
+
+# get the values of a build variable
+def get_build_var(variable, product, build_variant):
+  """Returns the result of the shell command get_build_var."""
+  env = {
+      **os.environ,
+      'TARGET_PRODUCT': product if product else '',
+      'TARGET_BUILD_VARIANT': build_variant if build_variant else '',
+  }
+  return subprocess.run([
+      'build/soong/soong_ui.bash',
+      '--dumpvar-mode',
+      variable
+  ], check=True, capture_output=True, env=env, text=True).stdout.strip()
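+# Example (hypothetical values):
+#   get_build_var("PRODUCT_OUT", "aosp_cf_x86_64_phone", "userdebug")
+#   == "out/target/product/vsoc_x86_64"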
+
+def get_make_file_partitions():
+    lunch_targets = set(get_build_var("all_named_products", "", "").split())
+    total_lunch_targets = len(lunch_targets)
+    makefile_by_partition = dict()
+    partitions = set()
+    current_count = 0
+    start_time = time.time()
+    # cannot run command `m lunch_target`
+    broken_targets = {"mainline_sdk", "ndk"}
+    for lunch_target in sorted(lunch_targets):
+        current_count += 1
+        current_time = time.time()
+        print(current_count, "/", total_lunch_targets, lunch_target, datetime.timedelta(seconds=current_time - start_time))
+        if lunch_target in broken_targets:
+            continue
+        installed_product_out = get_build_var("PRODUCT_OUT", lunch_target, "userdebug")
+        filename = os.path.join(installed_product_out, "mk2bp_remaining.csv")
+        copy_filename = os.path.join(installed_product_out, lunch_target + "_mk2bp_remaining.csv")
+        # only generate the .csv if it does not already exist
+        if not os.path.exists(copy_filename):
+            bash_cmd = "bash build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=" + lunch_target
+            bash_cmd += " TARGET_BUILD_VARIANT=userdebug " + filename
+            subprocess.run(bash_cmd, shell=True, text=True, check=True, stdout=subprocess.DEVNULL)
+            # keep a per-target copy of the .csv so a later build cannot overwrite it
+            shutil.copyfile(filename, copy_filename)
+
+        # open mk2bp_remaining.csv file
+        with open(copy_filename, "r") as csvfile:
+            reader = csv.reader(csvfile, delimiter=",", quotechar='"')
+            # skip the header row
+            next(reader, None)
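+            # each remaining row is expected to be: makefile path, module
+            # name(s), partition, ... (layout inferred from mk2bp_catalog.py's
+            # CSV output)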
+            for row in reader:
+                # read partition information
+                partition = row[2]
+                makefile_by_partition.setdefault(partition, set()).add(row[0])
+                partitions.add(partition)
+
+    # write merged make file list for each partition into a csv file
+    installed_path = Path(installed_product_out).parents[0].as_posix()
+    csv_path = installed_path + "/mk2bp_partition.csv"
+    with open(csv_path, "wt") as csvfile:
+        writer = csv.writer(csvfile, delimiter=",")
+        count_makefile = 0
+        for partition in sorted(partitions):
+            number_file = len(makefile_by_partition[partition])
+            count_makefile += number_file
+            writer.writerow([partition, number_file])
+            for makefile in sorted(makefile_by_partition[partition]):
+                writer.writerow([makefile])
+        row = ["The total count of make files is ", count_makefile]
+        writer.writerow(row)
+
+def main():
+    os.chdir(get_top())
+    get_make_file_partitions()
+
+if __name__ == "__main__":
+    main()
diff --git a/tools/normalize_path.py b/tools/normalize_path.py
index 6c4d548..363df1f 100755
--- a/tools/normalize_path.py
+++ b/tools/normalize_path.py
@@ -22,8 +22,8 @@
 
 if len(sys.argv) > 1:
   for p in sys.argv[1:]:
-    print os.path.normpath(p)
+    print(os.path.normpath(p))
   sys.exit(0)
 
 for line in sys.stdin:
-  print os.path.normpath(line.strip())
+  print(os.path.normpath(line.strip()))
diff --git a/tools/parsedeps.py b/tools/parsedeps.py
deleted file mode 100755
index 32d8ad7..0000000
--- a/tools/parsedeps.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python
-# vim: ts=2 sw=2
-
-import optparse
-import re
-import sys
-
-
-class Dependency:
-  def __init__(self, tgt):
-    self.tgt = tgt
-    self.pos = ""
-    self.prereqs = set()
-    self.visit = 0
-
-  def add(self, prereq):
-    self.prereqs.add(prereq)
-
-
-class Dependencies:
-  def __init__(self):
-    self.lines = {}
-    self.__visit = 0
-    self.count = 0
-
-  def add(self, tgt, prereq):
-    t = self.lines.get(tgt)
-    if not t:
-      t = Dependency(tgt)
-      self.lines[tgt] = t
-    p = self.lines.get(prereq)
-    if not p:
-      p = Dependency(prereq)
-      self.lines[prereq] = p
-    t.add(p)
-    self.count = self.count + 1
-
-  def setPos(self, tgt, pos):
-    t = self.lines.get(tgt)
-    if not t:
-      t = Dependency(tgt)
-      self.lines[tgt] = t
-    t.pos = pos
-
-  def get(self, tgt):
-    if self.lines.has_key(tgt):
-      return self.lines[tgt]
-    else:
-      return None
-
-  def __iter__(self):
-    return self.lines.iteritems()
-
-  def trace(self, tgt, prereq):
-    self.__visit = self.__visit + 1
-    d = self.lines.get(tgt)
-    if not d:
-      return
-    return self.__trace(d, prereq)
-
-  def __trace(self, d, prereq):
-    if d.visit == self.__visit:
-      return d.trace
-    if d.tgt == prereq:
-      return [ [ d ], ]
-    d.visit = self.__visit
-    result = []
-    for pre in d.prereqs:
-      recursed = self.__trace(pre, prereq)
-      for r in recursed:
-        result.append([ d ] + r)
-    d.trace = result
-    return result
-
-def help():
-  print "Commands:"
-  print "  dep TARGET             Print the prerequisites for TARGET"
-  print "  trace TARGET PREREQ    Print the paths from TARGET to PREREQ"
-
-
-def main(argv):
-  opts = optparse.OptionParser()
-  opts.add_option("-i", "--interactive", action="store_true", dest="interactive",
-                    help="Interactive mode")
-  (options, args) = opts.parse_args()
-
-  deps = Dependencies()
-
-  filename = args[0]
-  print "Reading %s" % filename
-
-  if True:
-    f = open(filename)
-    for line in f:
-      line = line.strip()
-      if len(line) > 0:
-        if line[0] == '#':
-          pos,tgt = line.rsplit(":", 1)
-          pos = pos[1:].strip()
-          tgt = tgt.strip()
-          deps.setPos(tgt, pos)
-        else:
-          (tgt,prereq) = line.split(':', 1)
-          tgt = tgt.strip()
-          prereq = prereq.strip()
-          deps.add(tgt, prereq)
-    f.close()
-
-  print "Read %d dependencies. %d targets." % (deps.count, len(deps.lines))
-  while True:
-    line = raw_input("target> ")
-    if not line.strip():
-      continue
-    split = line.split()
-    cmd = split[0]
-    if len(split) == 2 and cmd == "dep":
-      tgt = split[1]
-      d = deps.get(tgt)
-      if d:
-        for prereq in d.prereqs:
-          print prereq.tgt
-    elif len(split) == 3 and cmd == "trace":
-      tgt = split[1]
-      prereq = split[2]
-      if False:
-        print "from %s to %s" % (tgt, prereq)
-      trace = deps.trace(tgt, prereq)
-      if trace:
-        width = 0
-        for g in trace:
-          for t in g:
-            if len(t.tgt) > width:
-              width = len(t.tgt)
-        for g in trace:
-          for t in g:
-            if t.pos:
-              print t.tgt, " " * (width-len(t.tgt)), "  #", t.pos
-            else:
-              print t.tgt
-          print
-    else:
-      help()
-
-if __name__ == "__main__":
-  try:
-    main(sys.argv)
-  except KeyboardInterrupt:
-    print
-  except EOFError:
-    print
-
diff --git a/tools/rbcrun/host.go b/tools/rbcrun/host.go
index 4915de9..32afa45 100644
--- a/tools/rbcrun/host.go
+++ b/tools/rbcrun/host.go
@@ -20,7 +20,7 @@
 	"os"
 	"os/exec"
 	"path/filepath"
-	"regexp"
+	"sort"
 	"strings"
 
 	"go.starlark.net/starlark"
@@ -112,36 +112,6 @@
 	return e.globals, e.err
 }
 
-// fileExists returns True if file with given name exists.
-func fileExists(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
-	kwargs []starlark.Tuple) (starlark.Value, error) {
-	var path string
-	if err := starlark.UnpackPositionalArgs(b.Name(), args, kwargs, 1, &path); err != nil {
-		return starlark.None, err
-	}
-	if _, err := os.Stat(path); err != nil {
-		return starlark.False, nil
-	}
-	return starlark.True, nil
-}
-
-// regexMatch(pattern, s) returns True if s matches pattern (a regex)
-func regexMatch(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
-	kwargs []starlark.Tuple) (starlark.Value, error) {
-	var pattern, s string
-	if err := starlark.UnpackPositionalArgs(b.Name(), args, kwargs, 2, &pattern, &s); err != nil {
-		return starlark.None, err
-	}
-	match, err := regexp.MatchString(pattern, s)
-	if err != nil {
-		return starlark.None, err
-	}
-	if match {
-		return starlark.True, nil
-	}
-	return starlark.False, nil
-}
-
 // wildcard(pattern, top=None) expands shell's glob pattern. If 'top' is present,
 // the 'top/pattern' is globbed and then 'top/' prefix is removed.
 func wildcard(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
@@ -168,6 +138,10 @@
 			files[i] = strings.TrimPrefix(files[i], prefix)
 		}
 	}
+	// Kati uses glob(3) with no flags, which means it's sorted
+	// because GLOB_NOSORT is not passed. Go's glob is not
+	// guaranteed to sort the results.
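+	// e.g. glob results {"b/x.mk", "a/y.mk"} (hypothetical) come back as
+	// {"a/y.mk", "b/x.mk"} after sorting.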
+	sort.Strings(files)
 	return makeStringList(files), nil
 }
 
@@ -287,12 +261,8 @@
 		"struct":   starlark.NewBuiltin("struct", starlarkstruct.Make),
 		"rblf_cli": structFromEnv(env),
 		"rblf_env": structFromEnv(os.Environ()),
-		// To convert makefile's $(wildcard foo)
-		"rblf_file_exists": starlark.NewBuiltin("rblf_file_exists", fileExists),
 		// To convert find-copy-subdir and product-copy-files-by pattern
 		"rblf_find_files": starlark.NewBuiltin("rblf_find_files", find),
-		// To convert makefile's $(filter ...)/$(filter-out)
-		"rblf_regex": starlark.NewBuiltin("rblf_regex", regexMatch),
 		// To convert makefile's $(shell cmd)
 		"rblf_shell": starlark.NewBuiltin("rblf_shell", shell),
 		// Output to stderr
diff --git a/tools/rbcrun/host_test.go b/tools/rbcrun/host_test.go
index 3be5ee6..97f6ce9 100644
--- a/tools/rbcrun/host_test.go
+++ b/tools/rbcrun/host_test.go
@@ -147,10 +147,6 @@
 	}
 }
 
-func TestRegex(t *testing.T) {
-	exerciseStarlarkTestFile(t, "testdata/regex.star")
-}
-
 func TestShell(t *testing.T) {
 	if err := os.Setenv("TEST_DATA_DIR", dataDir()); err != nil {
 		t.Fatal(err)
diff --git a/tools/rbcrun/testdata/file_ops.star b/tools/rbcrun/testdata/file_ops.star
index 50e39bf..2ee78fc 100644
--- a/tools/rbcrun/testdata/file_ops.star
+++ b/tools/rbcrun/testdata/file_ops.star
@@ -4,9 +4,6 @@
 
 def test():
     myname = "file_ops.star"
-    assert.true(rblf_file_exists("."), "./ exists ")
-    assert.true(rblf_file_exists(myname), "the file %s does exist" % myname)
-    assert.true(not rblf_file_exists("no_such_file"), "the file no_such_file does not exist")
     files = rblf_wildcard("*.star")
     assert.true(myname in files, "expected %s in  %s" % (myname, files))
     files = rblf_wildcard("*.star", rblf_env.TEST_DATA_DIR)
diff --git a/tools/rbcrun/testdata/module1.star b/tools/rbcrun/testdata/module1.star
index 913fb7d..be04f75 100644
--- a/tools/rbcrun/testdata/module1.star
+++ b/tools/rbcrun/testdata/module1.star
@@ -2,6 +2,6 @@
 load("assert.star", "assert")
 
 # Make sure that builtins are defined for the loaded module, too
-assert.true(rblf_file_exists("module1.star"))
-assert.true(not rblf_file_exists("no_such file"))
+assert.true(rblf_wildcard("module1.star"))
+assert.true(not rblf_wildcard("no_such file"))
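+# rblf_wildcard returns the list of matches; a non-empty list is truthy, so it
+# can stand in for the removed rblf_file_exists checks here.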
 test = "module1"
diff --git a/tools/rbcrun/testdata/regex.star b/tools/rbcrun/testdata/regex.star
deleted file mode 100644
index 04e1d42..0000000
--- a/tools/rbcrun/testdata/regex.star
+++ /dev/null
@@ -1,13 +0,0 @@
-# Tests rblf_regex
-load("assert.star", "assert")
-
-
-def test():
-    pattern = "^(foo.*bar|abc.*d|1.*)$"
-    for w in ("foobar", "fooxbar", "abcxd", "123"):
-        assert.true(rblf_regex(pattern, w), "%s should match %s" % (w, pattern))
-    for w in ("afoobar", "abcde"):
-        assert.true(not rblf_regex(pattern, w), "%s should not match %s" % (w, pattern))
-
-
-test()
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index d8e34b7..29fc771 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -37,6 +37,7 @@
         "releasetools_build_image",
         "releasetools_build_super_image",
         "releasetools_common",
+        "libavbtool",
     ],
     required: [
         "care_map_generator",
@@ -62,7 +63,7 @@
         "mkuserimg_mke2fs",
         "simg2img",
         "tune2fs",
-        "mkf2fsuserimg.sh",
+        "mkf2fsuserimg",
         "fsck.f2fs",
     ],
 }
@@ -94,10 +95,13 @@
         "check_target_files_vintf.py",
     ],
     libs: [
+        "apex_manifest",
         "releasetools_common",
     ],
     required: [
         "checkvintf",
+        "deapexer",
+        "dump_apex_info",
     ],
 }
 
@@ -150,7 +154,6 @@
         "edify_generator.py",
         "non_ab_ota.py",
         "ota_from_target_files.py",
-        "ota_utils.py",
         "target_files_diff.py",
     ],
     libs: [
@@ -160,6 +163,7 @@
         "releasetools_verity_utils",
         "apex_manifest",
         "care_map_proto_py",
+        "ota_utils_lib",
     ],
     required: [
         "brillo_update_payload",
@@ -324,6 +328,33 @@
     ],
 }
 
+python_library_host {
+    name: "ota_utils_lib",
+    srcs: [
+        "ota_utils.py",
+        "payload_signer.py",
+    ],
+}
+
+python_binary_host {
+    name: "merge_ota",
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    srcs: [
+        "merge_ota.py",
+    ],
+    libs: [
+        "ota_metadata_proto",
+        "update_payload",
+        "care_map_proto_py",
+        "releasetools_common",
+        "ota_utils_lib",
+    ],
+}
+
 python_binary_host {
     name: "build_image",
     defaults: [
@@ -519,23 +550,6 @@
 }
 
 python_binary_host {
-    name: "fsverity_manifest_generator",
-    defaults: ["releasetools_binary_defaults"],
-    srcs: [
-        "fsverity_manifest_generator.py",
-    ],
-    libs: [
-        "fsverity_digests_proto_python",
-        "releasetools_common",
-    ],
-    required: [
-        "aapt2",
-        "apksigner",
-        "fsverity",
-    ],
-}
-
-python_binary_host {
     name: "fsverity_metadata_generator",
     defaults: ["releasetools_binary_defaults"],
     srcs: [
@@ -561,6 +575,7 @@
         "sign_apex.py",
         "sign_target_files_apks.py",
         "validate_target_files.py",
+        "merge_ota.py",
         ":releasetools_merge_sources",
         ":releasetools_merge_tests",
 
@@ -577,6 +592,7 @@
         "releasetools_img_from_target_files",
         "releasetools_ota_from_target_files",
         "releasetools_verity_utils",
+        "update_payload",
     ],
     data: [
         "testdata/**/*",
diff --git a/tools/releasetools/add_img_to_target_files b/tools/releasetools/add_img_to_target_files
deleted file mode 120000
index 04323bd..0000000
--- a/tools/releasetools/add_img_to_target_files
+++ /dev/null
@@ -1 +0,0 @@
-add_img_to_target_files.py
\ No newline at end of file
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index e3db161..d308a55 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -46,6 +46,7 @@
 
 from __future__ import print_function
 
+import avbtool
 import datetime
 import logging
 import os
@@ -62,9 +63,11 @@
 import common
 import verity_utils
 import ota_metadata_pb2
+import rangelib
+import sparse_img
 
 from apex_utils import GetApexInfoFromTargetFiles
-from common import AddCareMapForAbOta, ZipDelete
+from common import ZipDelete, PARTITIONS_WITH_CARE_MAP, ExternalError, RunAndCheckOutput, IsSparseImage, MakeTempFile, ZipWrite
 
 if sys.hexversion < 0x02070000:
   print("Python 2.7 or newer is required.", file=sys.stderr)
@@ -76,8 +79,6 @@
 OPTIONS.add_missing = False
 OPTIONS.rebuild_recovery = False
 OPTIONS.replace_updated_files_list = []
-OPTIONS.replace_verity_public_key = False
-OPTIONS.replace_verity_private_key = False
 OPTIONS.is_signing = False
 
 # Use a fixed timestamp (01/01/2009 00:00:00 UTC) for files when packaging
@@ -87,6 +88,159 @@
     datetime.datetime.utcfromtimestamp(0)).total_seconds())
 
 
+def ParseAvbFooter(img_path) -> avbtool.AvbFooter:
+  with open(img_path, 'rb') as fp:
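+    # the footer is a fixed-size struct at the very end of the image, so the
+    # last AvbFooter.SIZE bytes are enough to reconstruct it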
+    fp.seek(-avbtool.AvbFooter.SIZE, os.SEEK_END)
+    data = fp.read(avbtool.AvbFooter.SIZE)
+    return avbtool.AvbFooter(data)
+
+
+def GetCareMap(which, imgname):
+  """Returns the care_map string for the given partition.
+
+  Args:
+    which: The partition name, must be listed in PARTITIONS_WITH_CARE_MAP.
+    imgname: The filename of the image.
+
+  Returns:
+    (which, care_map_ranges): care_map_ranges is the raw string of the care_map
+    RangeSet; or None.
+  """
+  assert which in PARTITIONS_WITH_CARE_MAP
+
+  is_sparse_img = IsSparseImage(imgname)
+  unsparsed_image_size = os.path.getsize(imgname)
+
+  # A verified image contains the original image + hash tree data + FEC data
+  # + AVB footer, all concatenated together. The care map specifies the range
+  # of blocks that update_verifier should read on top of the dm-verity device
+  # to verify the correctness of OTA updates. When reading off the dm-verity
+  # device, the hash tree and FEC parts of the image aren't available, so the
+  # care map should only contain the original image blocks.
+  try:
+    avbfooter = None
+    if is_sparse_img:
+      with tempfile.NamedTemporaryFile() as tmpfile:
+        img = sparse_img.SparseImage(imgname)
+        unsparsed_image_size = img.total_blocks * img.blocksize
+        for data in img.ReadBlocks(img.total_blocks - 1, 1):
+          tmpfile.write(data)
+        tmpfile.flush()
+        avbfooter = ParseAvbFooter(tmpfile.name)
+    else:
+      avbfooter = ParseAvbFooter(imgname)
+  except LookupError as e:
+    logger.warning(
+        "Failed to parse avbfooter for partition %s image %s, %s", which, imgname, e)
+    return None
+
+  image_size = avbfooter.original_image_size
+  assert image_size < unsparsed_image_size, (
+      f"AVB footer's original image size {image_size} must be smaller than the"
+      f" on-disk image size {unsparsed_image_size}: a verified image is the"
+      " original image + hash tree data + FEC data + AVB footer.")
+  assert image_size > 0
+
+  image_blocks = int(image_size) // 4096 - 1
+  # It's OK for image_blocks to be 0, because care map ranges are inclusive.
+  # So 0-0 means "just block 0", which is valid.
+  assert image_blocks >= 0, "blocks for {} must be non-negative, image size: {}".format(
+      which, image_size)
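+  # Worked example (hypothetical size): a 64 MiB original image has
+  # 67108864 // 4096 = 16384 blocks, so image_blocks == 16383 and the
+  # inclusive range "0-16383" covers all of them.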
+
+  # For sparse images, we will only check the blocks that are listed in the care
+  # map, i.e. the ones with meaningful data.
+  if is_sparse_img:
+    simg = sparse_img.SparseImage(imgname)
+    care_map_ranges = simg.care_map.intersect(
+        rangelib.RangeSet("0-{}".format(image_blocks)))
+
+  # Otherwise for non-sparse images, we read all the blocks in the filesystem
+  # image.
+  else:
+    care_map_ranges = rangelib.RangeSet("0-{}".format(image_blocks))
+
+  return [which, care_map_ranges.to_string_raw()]
+
+
+def AddCareMapForAbOta(output_file, ab_partitions, image_paths):
+  """Generates and adds care_map.pb for a/b partition that has care_map.
+
+  Args:
+    output_file: The output zip file (needs to be already open),
+        or file path to write care_map.pb.
+    ab_partitions: The list of A/B partitions.
+    image_paths: A map from the partition name to the image path.
+  """
+  if not output_file:
+    raise ExternalError('Expected output_file for AddCareMapForAbOta')
+
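+  # care_map_list is built as a flat list: [partition, ranges, property_id,
+  # fingerprint, ...] for each qualifying partition (see the appends below)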
+  care_map_list = []
+  for partition in ab_partitions:
+    partition = partition.strip()
+    if partition not in PARTITIONS_WITH_CARE_MAP:
+      continue
+
+    verity_block_device = "{}_verity_block_device".format(partition)
+    avb_hashtree_enable = "avb_{}_hashtree_enable".format(partition)
+    if (verity_block_device in OPTIONS.info_dict or
+            OPTIONS.info_dict.get(avb_hashtree_enable) == "true"):
+      if partition not in image_paths:
+        logger.warning('Potential partition with care_map missing from images: %s',
+                       partition)
+        continue
+      image_path = image_paths[partition]
+      if not os.path.exists(image_path):
+        raise ExternalError('Expected image at path {}'.format(image_path))
+
+      care_map = GetCareMap(partition, image_path)
+      if not care_map:
+        continue
+      care_map_list += care_map
+
+      # adds fingerprint field to the care_map
+      # TODO(xunchang) revisit the fingerprint calculation for care_map.
+      partition_props = OPTIONS.info_dict.get(partition + ".build.prop")
+      prop_name_list = ["ro.{}.build.fingerprint".format(partition),
+                        "ro.{}.build.thumbprint".format(partition)]
+
+      present_props = [x for x in prop_name_list if
+                       partition_props and partition_props.GetProp(x)]
+      if not present_props:
+        logger.warning(
+            "fingerprint is not present for partition %s", partition)
+        property_id, fingerprint = "unknown", "unknown"
+      else:
+        property_id = present_props[0]
+        fingerprint = partition_props.GetProp(property_id)
+      care_map_list += [property_id, fingerprint]
+
+  if not care_map_list:
+    return
+
+  # Converts the list into proto buf message by calling care_map_generator; and
+  # writes the result to a temp file.
+  temp_care_map_text = MakeTempFile(prefix="caremap_text-",
+                                           suffix=".txt")
+  with open(temp_care_map_text, 'w') as text_file:
+    text_file.write('\n'.join(care_map_list))
+
+  temp_care_map = MakeTempFile(prefix="caremap-", suffix=".pb")
+  care_map_gen_cmd = ["care_map_generator", temp_care_map_text, temp_care_map]
+  RunAndCheckOutput(care_map_gen_cmd)
+
+  if not isinstance(output_file, zipfile.ZipFile):
+    shutil.copy(temp_care_map, output_file)
+    return
+  # output_file is a zip file
+  care_map_path = "META/care_map.pb"
+  if care_map_path in output_file.namelist():
+    # Copy the temp file into the OPTIONS.input_tmp dir and update the
+    # replace_updated_files_list used by add_img_to_target_files
+    if not OPTIONS.replace_updated_files_list:
+      OPTIONS.replace_updated_files_list = []
+    shutil.copy(temp_care_map, os.path.join(OPTIONS.input_tmp, care_map_path))
+    OPTIONS.replace_updated_files_list.append(care_map_path)
+  else:
+    ZipWrite(output_file, temp_care_map, arcname=care_map_path)
+
+
 class OutputFile(object):
   """A helper class to write a generated file to the given dir or zip.
 
@@ -279,6 +433,7 @@
       block_list=block_list)
   return img.name
 
+
 def AddSystemDlkm(output_zip):
   """Turn the contents of SystemDlkm into an system_dlkm image and store it in output_zip."""
 
@@ -457,8 +612,7 @@
 
   # Set the '_image_size' for given image size.
   is_verity_partition = "verity_block_device" in image_props
-  verity_supported = (image_props.get("verity") == "true" or
-                      image_props.get("avb_enable") == "true")
+  verity_supported = (image_props.get("avb_enable") == "true")
   is_avb_enable = image_props.get("avb_hashtree_enable") == "true"
   if verity_supported and (is_verity_partition or is_avb_enable):
     image_size = image_props.get("image_size")
@@ -557,7 +711,7 @@
   cmd = [bpttool, "make_table", "--output_json", bpt.name,
          "--output_gpt", img.name]
   input_files_str = OPTIONS.info_dict["board_bpt_input_files"]
-  input_files = input_files_str.split(" ")
+  input_files = input_files_str.split()
   for i in input_files:
     cmd.extend(["--input", i])
   disk_size = OPTIONS.info_dict.get("board_bpt_disk_size")
@@ -688,14 +842,13 @@
   SYSTEM/ after rebuilding recovery.
   """
   common.ZipDelete(zip_filename, files_list)
-  output_zip = zipfile.ZipFile(zip_filename, "a",
+  with zipfile.ZipFile(zip_filename, "a",
                                compression=zipfile.ZIP_DEFLATED,
-                               allowZip64=True)
-  for item in files_list:
-    file_path = os.path.join(OPTIONS.input_tmp, item)
-    assert os.path.exists(file_path)
-    common.ZipWrite(output_zip, file_path, arcname=item)
-  common.ZipClose(output_zip)
+                               allowZip64=True) as output_zip:
+    for item in files_list:
+      file_path = os.path.join(OPTIONS.input_tmp, item)
+      assert os.path.exists(file_path)
+      common.ZipWrite(output_zip, file_path, arcname=item)
 
 
 def HasPartition(partition_name):
@@ -783,7 +936,8 @@
   has_boot = OPTIONS.info_dict.get("no_boot") != "true"
   has_init_boot = OPTIONS.info_dict.get("init_boot") == "true"
   has_vendor_boot = OPTIONS.info_dict.get("vendor_boot") == "true"
-  has_vendor_kernel_boot = OPTIONS.info_dict.get("vendor_kernel_boot") == "true"
+  has_vendor_kernel_boot = OPTIONS.info_dict.get(
+      "vendor_kernel_boot") == "true"
 
   # {vendor,odm,product,system_ext,vendor_dlkm,odm_dlkm, system_dlkm, system, system_other}.img
   # can be built from source, or  dropped into target_files.zip as a prebuilt blob.
@@ -871,12 +1025,12 @@
 
   if has_vendor_kernel_boot:
     banner("vendor_kernel_boot")
-    vendor_kernel_boot_image = common.GetVendorBootImage(
+    vendor_kernel_boot_image = common.GetVendorKernelBootImage(
         "IMAGES/vendor_kernel_boot.img", "vendor_kernel_boot.img", OPTIONS.input_tmp,
         "VENDOR_KERNEL_BOOT")
     if vendor_kernel_boot_image:
       partitions['vendor_kernel_boot'] = os.path.join(OPTIONS.input_tmp, "IMAGES",
-                                               "vendor_kernel_boot.img")
+                                                      "vendor_kernel_boot.img")
       if not os.path.exists(partitions['vendor_kernel_boot']):
         vendor_kernel_boot_image.WriteToDir(OPTIONS.input_tmp)
         if output_zip:
@@ -1021,7 +1175,7 @@
   AddVbmetaDigest(output_zip)
 
   if output_zip:
-    common.ZipClose(output_zip)
+    output_zip.close()
     if OPTIONS.replace_updated_files_list:
       ReplaceUpdatedFiles(output_zip.filename,
                           OPTIONS.replace_updated_files_list)
@@ -1054,7 +1208,8 @@
     ZipDelete(zipfile_path, [entry.filename for entry in entries_to_store])
     with zipfile.ZipFile(zipfile_path, "a", allowZip64=True) as zfp:
       for entry in entries_to_store:
-        zfp.write(os.path.join(tmpdir, entry.filename), entry.filename, compress_type=zipfile.ZIP_STORED)
+        zfp.write(os.path.join(tmpdir, entry.filename),
+                  entry.filename, compress_type=zipfile.ZIP_STORED)
 
 
 def main(argv):
@@ -1064,9 +1219,11 @@
     elif o in ("-r", "--rebuild_recovery",):
       OPTIONS.rebuild_recovery = True
     elif o == "--replace_verity_private_key":
-      OPTIONS.replace_verity_private_key = (True, a)
+      raise ValueError("--replace_verity_private_key is no longer supported,"
+                       " please switch to AVB")
     elif o == "--replace_verity_public_key":
-      OPTIONS.replace_verity_public_key = (True, a)
+      raise ValueError("--replace_verity_public_key is no longer supported,"
+                       " please switch to AVB")
     elif o == "--is_signing":
       OPTIONS.is_signing = True
     else:
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 3f13a4a..22992e8 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -54,7 +54,7 @@
 class ApexApkSigner(object):
   """Class to sign the apk files and other files in an apex payload image and repack the apex"""
 
-  def __init__(self, apex_path, key_passwords, codename_to_api_level_map, avbtool=None, sign_tool=None, fsverity_tool=None):
+  def __init__(self, apex_path, key_passwords, codename_to_api_level_map, avbtool=None, sign_tool=None):
     self.apex_path = apex_path
     if not key_passwords:
       self.key_passwords = dict()
@@ -65,9 +65,8 @@
         OPTIONS.search_path, "bin", "debugfs_static")
     self.avbtool = avbtool if avbtool else "avbtool"
     self.sign_tool = sign_tool
-    self.fsverity_tool = fsverity_tool if fsverity_tool else "fsverity"
 
-  def ProcessApexFile(self, apk_keys, payload_key, signing_args=None, is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None):
+  def ProcessApexFile(self, apk_keys, payload_key, signing_args=None, is_sepolicy=False):
     """Scans and signs the payload files and repack the apex
 
     Args:
@@ -92,7 +91,7 @@
 
     # No need to sign and repack, return the original apex path.
     if not apk_entries and not sepolicy_entries and self.sign_tool is None:
-      logger.info('No payload (apk or zip) file to sign in %s', self.apex_path)
+      logger.info('No apk file to sign in %s', self.apex_path)
       return self.apex_path
 
     for entry in apk_entries:
@@ -106,16 +105,15 @@
         logger.warning('Apk path does not contain the intended directory name:'
                        ' %s', entry)
 
-    payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(apk_entries,
-        apk_keys, payload_key, sepolicy_entries, sepolicy_key, sepolicy_cert, signing_args)
+    payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(
+        apk_entries, sepolicy_entries, apk_keys, payload_key, signing_args)
     if not has_signed_content:
-      logger.info('No contents has been signed in %s', self.apex_path)
+      logger.info('No contents have been signed in %s', self.apex_path)
       return self.apex_path
 
     return self.RepackApexPayload(payload_dir, payload_key, signing_args)
 
-  def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key,
-  sepolicy_entries, sepolicy_key, sepolicy_cert, signing_args):
+  def ExtractApexPayloadAndSignContents(self, apk_entries, sepolicy_entries, apk_keys, payload_key, signing_args):
     """Extracts the payload image and signs the containing apk files."""
     if not os.path.exists(self.debugfs_path):
       raise ApexSigningError(
@@ -126,11 +124,11 @@
     extract_cmd = ['deapexer', '--debugfs_path',
                    self.debugfs_path, 'extract', self.apex_path, payload_dir]
     common.RunAndCheckOutput(extract_cmd)
+    assert os.path.exists(self.apex_path)
 
     has_signed_content = False
     for entry in apk_entries:
       apk_path = os.path.join(payload_dir, entry)
-      assert os.path.exists(self.apex_path)
 
       key_name = apk_keys.get(os.path.basename(entry))
       if key_name in common.SPECIAL_CERT_STRINGS:
@@ -148,9 +146,35 @@
       has_signed_content = True
 
     for entry in sepolicy_entries:
-      sepolicy_key = sepolicy_key if sepolicy_key else payload_key
-      self.SignSePolicy(payload_dir, entry, sepolicy_key, sepolicy_cert)
-      has_signed_content = True
+      sepolicy_path = os.path.join(payload_dir, entry)
+
+      if 'etc' not in entry:
+        logger.warning('Sepolicy path does not contain the intended directory name etc:'
+                       ' %s', entry)
+
+      key_name = apk_keys.get(os.path.basename(entry))
+      if key_name is None:
+        logger.warning('Failed to find signing keys for {} in'
+                       ' apex {}, payload key will be used instead.'
+                       ' Use "-e <name>=" to specify a key'
+                       .format(entry, self.apex_path))
+        key_name = payload_key
+
+      if key_name in common.SPECIAL_CERT_STRINGS:
+        logger.info('Not signing: %s due to special cert string', sepolicy_path)
+        continue
+
+      if OPTIONS.sign_sepolicy_path is not None:
+        sig_path = os.path.join(payload_dir, sepolicy_path + '.sig')
+        fsv_sig_path = os.path.join(payload_dir, sepolicy_path + '.fsv_sig')
+        old_sig = common.MakeTempFile()
+        old_fsv_sig = common.MakeTempFile()
+        os.rename(sig_path, old_sig)
+        os.rename(fsv_sig_path, old_fsv_sig)
+
+      logger.info('Signing sepolicy file %s in apex %s', sepolicy_path, self.apex_path)
+      if common.SignSePolicy(sepolicy_path, key_name, self.key_passwords.get(key_name)):
+        has_signed_content = True
 
     if self.sign_tool:
       logger.info('Signing payload contents in apex %s with %s', self.apex_path, self.sign_tool)
@@ -165,36 +189,6 @@
 
     return payload_dir, has_signed_content
 
-  def SignSePolicy(self, payload_dir, sepolicy_zip, sepolicy_key, sepolicy_cert):
-    sepolicy_sig = sepolicy_zip + '.sig'
-    sepolicy_fsv_sig = sepolicy_zip + '.fsv_sig'
-
-    policy_zip_path = os.path.join(payload_dir, sepolicy_zip)
-    sig_out_path = os.path.join(payload_dir, sepolicy_sig)
-    sig_old = sig_out_path + '.old'
-    if os.path.exists(sig_out_path):
-      os.rename(sig_out_path, sig_old)
-    sign_cmd = ['openssl', 'dgst', '-sign', sepolicy_key, '-keyform', 'PEM', '-sha256',
-        '-out', sig_out_path, '-binary', policy_zip_path]
-    common.RunAndCheckOutput(sign_cmd)
-    if os.path.exists(sig_old):
-      os.remove(sig_old)
-
-    if not sepolicy_cert:
-      logger.info('No cert provided for SEPolicy, skipping fsverity sign')
-      return
-
-    fsv_sig_out_path = os.path.join(payload_dir, sepolicy_fsv_sig)
-    fsv_sig_old = fsv_sig_out_path + '.old'
-    if os.path.exists(fsv_sig_out_path):
-      os.rename(fsv_sig_out_path, fsv_sig_old)
-
-    fsverity_cmd = [self.fsverity_tool, 'sign', policy_zip_path, fsv_sig_out_path,
-        '--key=' + sepolicy_key, '--cert=' + sepolicy_cert]
-    common.RunAndCheckOutput(fsverity_cmd)
-    if os.path.exists(fsv_sig_old):
-      os.remove(fsv_sig_old)
-
   def RepackApexPayload(self, payload_dir, payload_key, signing_args=None):
     """Rebuilds the apex file with the updated payload directory."""
     apex_dir = common.MakeTempDir()
@@ -366,8 +360,7 @@
 def SignUncompressedApex(avbtool, apex_file, payload_key, container_key,
                          container_pw, apk_keys, codename_to_api_level_map,
                          no_hashtree, signing_args=None, sign_tool=None,
-                         is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None,
-                         fsverity_tool=None):
+                         is_sepolicy=False):
   """Signs the current uncompressed APEX with the given payload/container keys.
 
   Args:
@@ -381,9 +374,6 @@
     signing_args: Additional args to be passed to the payload signer.
     sign_tool: A tool to sign the contents of the APEX.
     is_sepolicy: Indicates if the apex is a sepolicy.apex
-    sepolicy_key: Key to sign a sepolicy zip.
-    sepolicy_cert: Cert to sign a sepolicy zip.
-    fsverity_tool: fsverity path to sign sepolicy zip.
 
   Returns:
     The path to the signed APEX file.
@@ -392,9 +382,9 @@
   # the apex file after signing.
   apk_signer = ApexApkSigner(apex_file, container_pw,
                              codename_to_api_level_map,
-                             avbtool, sign_tool, fsverity_tool)
+                             avbtool, sign_tool)
   apex_file = apk_signer.ProcessApexFile(
-      apk_keys, payload_key, signing_args, is_sepolicy, sepolicy_key, sepolicy_cert)
+      apk_keys, payload_key, signing_args, is_sepolicy)
 
   # 2a. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given
   # payload_key.
@@ -425,7 +415,7 @@
   apex_zip = zipfile.ZipFile(apex_file, 'a', allowZip64=True)
   common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
   common.ZipWrite(apex_zip, payload_public_key, arcname=APEX_PUBKEY)
-  common.ZipClose(apex_zip)
+  apex_zip.close()
 
   # 3. Sign the APEX container with container_key.
   signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
@@ -449,8 +439,7 @@
 def SignCompressedApex(avbtool, apex_file, payload_key, container_key,
                        container_pw, apk_keys, codename_to_api_level_map,
                        no_hashtree, signing_args=None, sign_tool=None,
-                       is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None,
-                       fsverity_tool=None):
+                       is_sepolicy=False):
   """Signs the current compressed APEX with the given payload/container keys.
 
   Args:
@@ -463,9 +452,6 @@
     no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
     is_sepolicy: Indicates if the apex is a sepolicy.apex
-    sepolicy_key: Key to sign a sepolicy zip.
-    sepolicy_cert: Cert to sign a sepolicy zip.
-    fsverity_tool: fsverity path to sign sepolicy zip.
 
   Returns:
     The path to the signed APEX file.
@@ -493,10 +479,7 @@
       no_hashtree,
       signing_args,
       sign_tool,
-      is_sepolicy,
-      sepolicy_key,
-      sepolicy_cert,
-      fsverity_tool)
+      is_sepolicy)
 
   # 3. Compress signed original apex.
   compressed_apex_file = common.MakeTempFile(prefix='apex-container-',
@@ -523,9 +506,8 @@
 
 
 def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
-             apk_keys, codename_to_api_level_map,
-             no_hashtree, signing_args=None, sign_tool=None,
-             is_sepolicy=False, sepolicy_key=None, sepolicy_cert=None, fsverity_tool=None):
+             apk_keys, codename_to_api_level_map, no_hashtree,
+             signing_args=None, sign_tool=None, is_sepolicy=False):
   """Signs the current APEX with the given payload/container keys.
 
   Args:
@@ -537,9 +519,7 @@
     codename_to_api_level_map: A dict that maps from codename to API level.
     no_hashtree: Don't include hashtree in the signed APEX.
     signing_args: Additional args to be passed to the payload signer.
-    sepolicy_key: Key to sign a sepolicy zip.
-    sepolicy_cert: Cert to sign a sepolicy zip.
-    fsverity_tool: fsverity path to sign sepolicy zip.
+    is_sepolicy: Indicates if the apex is a sepolicy.apex
 
   Returns:
     The path to the signed APEX file.
@@ -560,32 +540,26 @@
           apex_file,
           payload_key=payload_key,
           container_key=container_key,
-          container_pw=None,
+          container_pw=container_pw,
           codename_to_api_level_map=codename_to_api_level_map,
           no_hashtree=no_hashtree,
           apk_keys=apk_keys,
           signing_args=signing_args,
           sign_tool=sign_tool,
-          is_sepolicy=is_sepolicy,
-          sepolicy_key=sepolicy_key,
-          sepolicy_cert=sepolicy_cert,
-          fsverity_tool=fsverity_tool)
+          is_sepolicy=is_sepolicy)
     elif apex_type == 'COMPRESSED':
       return SignCompressedApex(
           avbtool,
           apex_file,
           payload_key=payload_key,
           container_key=container_key,
-          container_pw=None,
+          container_pw=container_pw,
           codename_to_api_level_map=codename_to_api_level_map,
           no_hashtree=no_hashtree,
           apk_keys=apk_keys,
           signing_args=signing_args,
           sign_tool=sign_tool,
-          is_sepolicy=is_sepolicy,
-          sepolicy_key=sepolicy_key,
-          sepolicy_cert=sepolicy_cert,
-          fsverity_tool=fsverity_tool)
+          is_sepolicy=is_sepolicy)
     else:
       # TODO(b/172912232): support signing compressed apex
       raise ApexInfoError('Unsupported apex type {}'.format(apex_type))
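
The rewritten loop above resolves a signing key per sepolicy entry: a per-entry key from `apk_keys` when configured, otherwise the payload key, and entries mapped to a special cert string are skipped. A minimal standalone sketch of that resolution logic (the `SPECIAL_CERT_STRINGS` values are written out here for illustration; `common.SPECIAL_CERT_STRINGS` is authoritative):

```python
import os

# Illustrative copy; the real list lives in common.SPECIAL_CERT_STRINGS.
SPECIAL_CERT_STRINGS = ('PRESIGNED', 'EXTERNAL')

def resolve_signing_key(entry, apk_keys, payload_key):
  """Returns the key to sign entry with, or None to skip signing."""
  key_name = apk_keys.get(os.path.basename(entry))
  if key_name is None:
    # No per-entry key configured; fall back to the payload key.
    key_name = payload_key
  if key_name in SPECIAL_CERT_STRINGS:
    # Presigned/external entries are left untouched.
    return None
  return key_name
```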
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index d33c2f7..211182a 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -537,14 +537,6 @@
 
     self.touched_src_sha1 = self.src.RangeSha1(self.touched_src_ranges)
 
-    if self.tgt.hashtree_info:
-      out.append("compute_hash_tree {} {} {} {} {}\n".format(
-          self.tgt.hashtree_info.hashtree_range.to_string_raw(),
-          self.tgt.hashtree_info.filesystem_range.to_string_raw(),
-          self.tgt.hashtree_info.hash_algorithm,
-          self.tgt.hashtree_info.salt,
-          self.tgt.hashtree_info.root_hash))
-
     # Zero out extended blocks as a workaround for bug 20881595.
     if self.tgt.extended:
       assert (WriteSplitTransfers(out, "zero", self.tgt.extended) ==
@@ -830,12 +822,6 @@
           assert touched[i] == 0
           touched[i] = 1
 
-    if self.tgt.hashtree_info:
-      for s, e in self.tgt.hashtree_info.hashtree_range:
-        for i in range(s, e):
-          assert touched[i] == 0
-          touched[i] = 1
-
     # Check that we've written every target block.
     for s, e in self.tgt.care_map:
       for i in range(s, e):
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index e33b581..252b1d5 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -328,9 +328,17 @@
       compressor = prop_dict["erofs_default_compressor"]
     if "erofs_compressor" in prop_dict:
       compressor = prop_dict["erofs_compressor"]
-    if compressor:
+    if compressor and compressor != "none":
       build_command.extend(["-z", compressor])
 
+    compress_hints = None
+    if "erofs_default_compress_hints" in prop_dict:
+      compress_hints = prop_dict["erofs_default_compress_hints"]
+    if "erofs_compress_hints" in prop_dict:
+      compress_hints = prop_dict["erofs_compress_hints"]
+    if compress_hints:
+      build_command.extend(["--compress-hints", compress_hints])
+
     build_command.extend(["--mount-point", prop_dict["mount_point"]])
     if target_out:
       build_command.extend(["--product-out", target_out])
@@ -348,6 +356,8 @@
       build_command.extend(["-C", prop_dict["erofs_pcluster_size"]])
     if "erofs_share_dup_blocks" in prop_dict:
       build_command.extend(["--chunksize", "4096"])
+    if "erofs_use_legacy_compression" in prop_dict:
+      build_command.extend(["-E", "legacy-compress"])
 
     build_command.extend([out_file, in_dir])
     if "erofs_sparse_flag" in prop_dict and not disable_sparse:
@@ -355,7 +365,7 @@
 
     run_fsck = RunErofsFsck
   elif fs_type.startswith("squash"):
-    build_command = ["mksquashfsimage.sh"]
+    build_command = ["mksquashfsimage"]
     build_command.extend([in_dir, out_file])
     if "squashfs_sparse_flag" in prop_dict and not disable_sparse:
       build_command.extend([prop_dict["squashfs_sparse_flag"]])
@@ -377,7 +387,7 @@
     if prop_dict.get("squashfs_disable_4k_align") == "true":
       build_command.extend(["-a"])
   elif fs_type.startswith("f2fs"):
-    build_command = ["mkf2fsuserimg.sh"]
+    build_command = ["mkf2fsuserimg"]
     build_command.extend([out_file, prop_dict["image_size"]])
     if "f2fs_sparse_flag" in prop_dict and not disable_sparse:
       build_command.extend([prop_dict["f2fs_sparse_flag"]])
@@ -400,7 +410,7 @@
       build_command.append("--casefold")
     if (needs_compress or prop_dict.get("f2fs_compress") == "true"):
       build_command.append("--compression")
-    if (prop_dict.get("mount_point") != "data"):
+    if "ro_mount_point" in prop_dict:
       build_command.append("--readonly")
     if (prop_dict.get("f2fs_compress") == "true"):
       build_command.append("--sldc")
@@ -650,20 +660,17 @@
   common_props = (
       "extfs_sparse_flag",
       "erofs_default_compressor",
+      "erofs_default_compress_hints",
       "erofs_pcluster_size",
       "erofs_share_dup_blocks",
       "erofs_sparse_flag",
+      "erofs_use_legacy_compression",
       "squashfs_sparse_flag",
       "system_f2fs_compress",
       "system_f2fs_sldc_flags",
       "f2fs_sparse_flag",
       "skip_fsck",
       "ext_mkuserimg",
-      "verity",
-      "verity_key",
-      "verity_signer_cmd",
-      "verity_fec",
-      "verity_disable",
       "avb_enable",
       "avb_avbtool",
       "use_dynamic_partition_size",
@@ -698,10 +705,12 @@
       (True, "avb_{}_hashtree_enable", "avb_hashtree_enable"),
       (True, "avb_{}_key_path", "avb_key_path"),
       (True, "avb_{}_salt", "avb_salt"),
+      (True, "erofs_use_legacy_compression", "erofs_use_legacy_compression"),
       (True, "ext4_share_dup_blocks", "ext4_share_dup_blocks"),
       (True, "{}_base_fs_file", "base_fs_file"),
       (True, "{}_disable_sparse", "disable_sparse"),
       (True, "{}_erofs_compressor", "erofs_compressor"),
+      (True, "{}_erofs_compress_hints", "erofs_compress_hints"),
       (True, "{}_erofs_pcluster_size", "erofs_pcluster_size"),
       (True, "{}_erofs_share_dup_blocks", "erofs_share_dup_blocks"),
       (True, "{}_extfs_inode_count", "extfs_inode_count"),
@@ -748,6 +757,8 @@
     if not copy_prop(prop, "extfs_rsv_pct"):
       d["extfs_rsv_pct"] = "0"
 
+    d["ro_mount_point"] = "1"
+
   # Copy partition-specific properties.
   d["mount_point"] = mount_point
   if mount_point == "system":
@@ -810,16 +821,18 @@
 
 
 def main(argv):
-  if len(argv) != 4:
+  args = common.ParseOptions(argv, __doc__)
+
+  if len(args) != 4:
     print(__doc__)
     sys.exit(1)
 
   common.InitLogging()
 
-  in_dir = argv[0]
-  glob_dict_file = argv[1]
-  out_file = argv[2]
-  target_out = argv[3]
+  in_dir = args[0]
+  glob_dict_file = args[1]
+  out_file = args[2]
+  target_out = args[3]
 
   glob_dict = LoadGlobalDict(glob_dict_file)
   if "mount_point" in glob_dict:
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index b395c19..97957be 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -142,7 +142,7 @@
   """Verifies the payload and metadata signatures in an A/B OTA payload."""
   package_zip = zipfile.ZipFile(package, 'r', allowZip64=True)
   if 'payload.bin' not in package_zip.namelist():
-    common.ZipClose(package_zip)
+    package_zip.close()
     return
 
   print('Verifying A/B OTA payload signatures...')
@@ -160,7 +160,7 @@
          '--in_file=' + payload_file,
          '--public_key=' + pubkey]
   common.RunAndCheckOutput(cmd)
-  common.ZipClose(package_zip)
+  package_zip.close()
 
   # Verified successfully upon reaching here.
   print('\nPayload signatures VERIFIED\n\n')
diff --git a/tools/releasetools/check_target_files_signatures b/tools/releasetools/check_target_files_signatures
deleted file mode 120000
index 9f62aa3..0000000
--- a/tools/releasetools/check_target_files_signatures
+++ /dev/null
@@ -1 +0,0 @@
-check_target_files_signatures.py
\ No newline at end of file
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index 4a2a905..b32b85c 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -22,13 +22,16 @@
 target_files can be a ZIP file or an extracted target files directory.
 """
 
+import json
 import logging
+import os
+import shutil
 import subprocess
 import sys
-import os
 import zipfile
 
 import common
+from apex_manifest import ParseApexManifest
 
 logger = logging.getLogger(__name__)
 
@@ -123,7 +126,12 @@
     logger.warning('PRODUCT_ENFORCE_VINTF_MANIFEST is not set, skipping checks')
     return True
 
+
   dirmap = GetDirmap(input_tmp)
+
+  apex_root, apex_info_file = PrepareApexDirectory(input_tmp)
+  dirmap['/apex'] = apex_root
+
   args_for_skus = GetArgsForSkus(info_dict)
   shipping_api_level_args = GetArgsForShippingApiLevel(info_dict)
   kernel_args = GetArgsForKernel(input_tmp)
@@ -132,6 +140,8 @@
       'checkvintf',
       '--check-compat',
   ]
+  common_command += ['--apex-info-file', apex_info_file]
+
   for device_path, real_path in sorted(dirmap.items()):
     common_command += ['--dirmap', '{}:{}'.format(device_path, real_path)]
   common_command += kernel_args
@@ -142,9 +152,10 @@
     command = common_command + sku_args
     proc = common.Run(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
     out, err = proc.communicate()
+    last_out_line = out.split()[-1] if out != "" else out
     if proc.returncode == 0:
       logger.info("Command `%s` returns 'compatible'", ' '.join(command))
-    elif out.strip() == "INCOMPATIBLE":
+    elif last_out_line.strip() == "INCOMPATIBLE":
       logger.info("Command `%s` returns 'incompatible'", ' '.join(command))
       success = False
     else:
@@ -185,6 +196,112 @@
   paths = sum((PathToPatterns(path) for path in paths if path), [])
   return paths
 
+def GetVintfApexUnzipPatterns():
+  """ Build unzip pattern for APEXes. """
+  patterns = []
+  for target_files_rel_paths in DIR_SEARCH_PATHS.values():
+    for target_files_rel_path in target_files_rel_paths:
+      patterns.append(os.path.join(target_files_rel_path,"apex/*"))
+
+  return patterns
+
+def PrepareApexDirectory(inp):
+  """ Prepare the APEX data.
+
+  APEX binaries do not support dirmaps, so in order to use these binaries we
+  need to move the APEXes from the extracted target files archive to the
+  expected device locations.
+
+  The APEXes will also be extracted under the APEX/ directory
+  matching what would be on the target.
+
+  Create the following structure under the input inp directory:
+       APEX/apex             # Extracted APEXes
+       APEX/system/apex/     # System APEXes
+       APEX/vendor/apex/     # Vendor APEXes
+       ...
+
+  Args:
+    inp: path to the directory that contains the extracted target files archive.
+
+  Returns:
+    extracted apex directory
+    apex-info-list.xml file
+  """
+
+  deapexer = 'deapexer'
+  debugfs_path = 'debugfs'
+  blkid_path = 'blkid'
+  fsckerofs_path = 'fsck.erofs'
+  if OPTIONS.search_path:
+    debugfs_path = os.path.join(OPTIONS.search_path, 'bin', 'debugfs_static')
+    deapexer_path = os.path.join(OPTIONS.search_path, 'bin', 'deapexer')
+    blkid_path = os.path.join(OPTIONS.search_path, 'bin', 'blkid')
+    fsckerofs_path = os.path.join(OPTIONS.search_path, 'bin', 'fsck.erofs')
+    if os.path.isfile(deapexer_path):
+      deapexer = deapexer_path
+
+  def ExtractApexes(path, outp):
+    # Extract all APEXes found in input path.
+    logger.info('Extracting APEXes in %s', path)
+    for f in os.listdir(path):
+      logger.info('  adding APEX %s', os.path.basename(f))
+      apex = os.path.join(path, f)
+      if os.path.isdir(apex) and os.path.isfile(os.path.join(apex, 'apex_manifest.pb')):
+        info = ParseApexManifest(os.path.join(apex, 'apex_manifest.pb'))
+        # Flattened APEXes may have symlinks for libs (linked to /system/lib),
+        # so we need to blindly copy them all.
+        shutil.copytree(apex, os.path.join(outp, info.name), symlinks=True)
+      elif os.path.isfile(apex) and apex.endswith(('.apex', '.capex')):
+        cmd = [deapexer,
+               '--debugfs_path', debugfs_path,
+               'info',
+               apex]
+        info = json.loads(common.RunAndCheckOutput(cmd))
+
+        cmd = [deapexer,
+               '--debugfs_path', debugfs_path,
+               '--fsckerofs_path', fsckerofs_path,
+               '--blkid_path', blkid_path,
+               'extract',
+               apex,
+               os.path.join(outp, info['name'])]
+        common.RunAndCheckOutput(cmd)
+      else:
+        logger.info('  .. skipping %s (is it an APEX?)', apex)
+
+  root_dir_name = 'APEX'
+  root_dir = os.path.join(inp, root_dir_name)
+  extracted_root = os.path.join(root_dir, 'apex')
+  apex_info_file = os.path.join(extracted_root, 'apex-info-list.xml')
+
+  # Always create APEX directory for dirmap
+  os.makedirs(extracted_root)
+
+  create_info_file = False
+
+  # Loop through search path looking for and processing apex/ directories.
+  for device_path, target_files_rel_paths in DIR_SEARCH_PATHS.items():
+    for target_files_rel_path in target_files_rel_paths:
+      inp_partition = os.path.join(inp, target_files_rel_path,"apex")
+      if os.path.exists(inp_partition):
+        apex_dir = root_dir + os.path.join(device_path + "/apex");
+        os.makedirs(root_dir + device_path)
+        shutil.copytree(inp_partition, apex_dir, symlinks=True)
+        ExtractApexes(apex_dir, extracted_root)
+        create_info_file = True
+
+  if create_info_file:
+    # Create apex-info-list.xml.
+    dump_cmd = ['dump_apex_info',
+                '--root_dir', root_dir,
+                '--out_file', apex_info_file]
+    common.RunAndCheckOutput(dump_cmd)
+    if not os.path.exists(apex_info_file):
+      raise RuntimeError('Failed to create apex info file %s' % apex_info_file)
+    logger.info('Created %s', apex_info_file)
+
+  return extracted_root, apex_info_file
 
 def CheckVintfFromTargetFiles(inp, info_dict=None):
   """
@@ -198,7 +315,7 @@
     True if VINTF check is skipped or compatible, False if incompatible. Raise
     a RuntimeError if any error occurs.
   """
-  input_tmp = common.UnzipTemp(inp, GetVintfFileList() + UNZIP_PATTERN)
+  input_tmp = common.UnzipTemp(inp, GetVintfFileList() + GetVintfApexUnzipPatterns() + UNZIP_PATTERN)
   return CheckVintfFromExtractedTargetFiles(input_tmp, info_dict)
 
 
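
With the APEXes extracted, `checkvintf` can resolve `/apex` paths through the dirmap and the generated apex-info-list.xml. A hedged sketch of the resulting invocation (paths are hypothetical):

```python
import subprocess

# PrepareApexDirectory() returns the extracted APEX root and the
# apex-info-list.xml path; the literals below stand in for them.
dirmap = {
    '/system': '/tmp/tf/SYSTEM',
    '/vendor': '/tmp/tf/VENDOR',
    '/apex': '/tmp/tf/APEX/apex',
}
apex_info_file = '/tmp/tf/APEX/apex/apex-info-list.xml'

cmd = ['checkvintf', '--check-compat', '--apex-info-file', apex_info_file]
for device_path, real_path in sorted(dirmap.items()):
  cmd += ['--dirmap', '{}:{}'.format(device_path, real_path)]
proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
```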
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index bd3af68..2f05d44 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -20,6 +20,7 @@
 import datetime
 import errno
 import fnmatch
+from genericpath import isdir
 import getopt
 import getpass
 import gzip
@@ -72,18 +73,16 @@
       if "ANDROID_HOST_OUT" in os.environ:
         self.search_path = os.environ["ANDROID_HOST_OUT"]
     self.signapk_shared_library_path = "lib64"   # Relative to search_path
+    self.sign_sepolicy_path = None
     self.extra_signapk_args = []
+    self.extra_sign_sepolicy_args = []
     self.aapt2_path = "aapt2"
     self.java_path = "java"  # Use the one on the path by default.
-    self.java_args = ["-Xmx2048m"]  # The default JVM args.
+    self.java_args = ["-Xmx4096m"]  # The default JVM args.
     self.android_jar_path = None
     self.public_key_suffix = ".x509.pem"
     self.private_key_suffix = ".pk8"
     # use otatools built boot_signer by default
-    self.boot_signer_path = "boot_signer"
-    self.boot_signer_args = []
-    self.verity_signer_path = None
-    self.verity_signer_args = []
     self.verbose = False
     self.tempfiles = []
     self.device_specific = None
@@ -114,7 +113,7 @@
 # AVB_FOOTER_ARGS_BY_PARTITION in sign_target_files_apks need to be updated
 # accordingly.
 AVB_PARTITIONS = ('boot', 'init_boot', 'dtbo', 'odm', 'product', 'pvmfw', 'recovery',
-                  'system', 'system_ext', 'vendor', 'vendor_boot',
+                  'system', 'system_ext', 'vendor', 'vendor_boot', 'vendor_kernel_boot',
                   'vendor_dlkm', 'odm_dlkm', 'system_dlkm')
 
 # Chained VBMeta partitions.
@@ -455,6 +454,11 @@
     return vabc_enabled
 
   @property
+  def is_android_r(self):
+    system_prop = self.info_dict.get("system.build.prop")
+    return system_prop and system_prop.GetProp("ro.build.version.release") == "11"
+
+  @property
   def is_vabc_xor(self):
     vendor_prop = self.info_dict.get("vendor.build.prop")
     vabc_xor_enabled = vendor_prop and \
@@ -696,7 +700,13 @@
   """Reads the contents of fn from input zipfile or directory."""
   if isinstance(input_file, zipfile.ZipFile):
     return input_file.read(fn).decode()
+  elif zipfile.is_zipfile(input_file):
+    with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
+      return zfp.read(fn).decode()
   else:
+    if not os.path.isdir(input_file):
+      raise ValueError(
+          "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
     path = os.path.join(input_file, *fn.split("/"))
     try:
       with open(path) as f:
@@ -713,7 +723,16 @@
     with open(tmp_file, 'wb') as f:
       f.write(input_file.read(fn))
     return tmp_file
+  elif zipfile.is_zipfile(input_file):
+    with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
+      tmp_file = MakeTempFile(os.path.basename(fn))
+      with open(tmp_file, "wb") as fp:
+        fp.write(zfp.read(fn))
+      return tmp_file
   else:
+    if not os.path.isdir(input_file):
+      raise ValueError(
+          "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
     file = os.path.join(input_file, *fn.split("/"))
     if not os.path.exists(file):
       raise KeyError(fn)
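
After this change the input-file helpers accept three forms: an open ZipFile object, a path to a .zip on disk, or a path to an extracted directory; anything else raises the ValueError above. A hedged illustration, assuming the first helper shown is `common.ReadFromInputFile` (paths are hypothetical):

```python
import zipfile
import common

# All three forms read META/misc_info.txt the same way.
with zipfile.ZipFile('target-files.zip', 'r') as zfp:
  from_zip_obj = common.ReadFromInputFile(zfp, 'META/misc_info.txt')
from_zip_path = common.ReadFromInputFile('target-files.zip',
                                         'META/misc_info.txt')
from_directory = common.ReadFromInputFile('/tmp/tf_extracted',
                                          'META/misc_info.txt')
```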
@@ -725,7 +744,7 @@
   GZ = 2
 
 
-def _GetRamdiskFormat(info_dict):
+def GetRamdiskFormat(info_dict):
   if info_dict.get('lz4_ramdisks') == 'true':
     ramdisk_format = RamdiskFormat.LZ4
   else:
@@ -834,7 +853,7 @@
 
   # Load recovery fstab if applicable.
   d["fstab"] = _FindAndLoadRecoveryFstab(d, input_file, read_helper)
-  ramdisk_format = _GetRamdiskFormat(d)
+  ramdisk_format = GetRamdiskFormat(d)
 
   # Tries to load the build props for all partitions with care_map, including
   # system and vendor.
@@ -854,6 +873,10 @@
         d["avb_{}_salt".format(partition)] = sha256(
             fingerprint.encode()).hexdigest()
 
+    # Set up the salt for partitions without build.prop
+    if build_info.fingerprint:
+      d["avb_salt"] = sha256(build_info.fingerprint.encode()).hexdigest()
+
     # Set the vbmeta digest if exists
     try:
       d["vbmeta_digest"] = read_helper("META/vbmeta_digest.txt").rstrip()
@@ -1048,6 +1071,13 @@
     return {key: val for key, val in d.items()
             if key in self.props_allow_override}
 
+  def __getstate__(self):
+    state = self.__dict__.copy()
+    # ZipFile objects cannot be pickled; store the underlying filename instead.
+    if "input_file" in state and isinstance(state["input_file"], zipfile.ZipFile):
+      state["input_file"] = state["input_file"].filename
+    return state
+
   def GetProp(self, prop):
     return self.build_props.get(prop)
 
@@ -1182,8 +1212,8 @@
   """
 
   def uniq_concat(a, b):
-    combined = set(a.split(" "))
-    combined.update(set(b.split(" ")))
+    combined = set(a.split())
+    combined.update(set(b.split()))
     combined = [item.strip() for item in combined if item.strip()]
     return " ".join(sorted(combined))
 
@@ -1192,6 +1222,10 @@
     raise ValueError("Both dictionaries must have use_dynamic_partitions=true")
 
   merged_dict = {"use_dynamic_partitions": "true"}
+  # For key-value pairs that are the same, copy to the merged dict.
+  for key in vendor_dict.keys():
+    if key in framework_dict and framework_dict[key] == vendor_dict[key]:
+      merged_dict[key] = vendor_dict[key]
 
   merged_dict["dynamic_partition_list"] = uniq_concat(
       framework_dict.get("dynamic_partition_list", ""),
@@ -1200,7 +1234,7 @@
   # Super block devices are defined by the vendor dict.
   if "super_block_devices" in vendor_dict:
     merged_dict["super_block_devices"] = vendor_dict["super_block_devices"]
-    for block_device in merged_dict["super_block_devices"].split(" "):
+    for block_device in merged_dict["super_block_devices"].split():
       key = "super_%s_device_size" % block_device
       if key not in vendor_dict:
         raise ValueError("Vendor dict does not contain required key %s." % key)
@@ -1209,7 +1243,7 @@
   # Partition groups and group sizes are defined by the vendor dict because
   # these values may vary for each board that uses a shared system image.
   merged_dict["super_partition_groups"] = vendor_dict["super_partition_groups"]
-  for partition_group in merged_dict["super_partition_groups"].split(" "):
+  for partition_group in merged_dict["super_partition_groups"].split():
     # Set the partition group's size using the value from the vendor dict.
     key = "super_%s_group_size" % partition_group
     if key not in vendor_dict:
@@ -1575,7 +1609,7 @@
   img = tempfile.NamedTemporaryFile()
 
   if has_ramdisk:
-    ramdisk_format = _GetRamdiskFormat(info_dict)
+    ramdisk_format = GetRamdiskFormat(info_dict)
     ramdisk_img = _MakeRamdisk(sourcedir, fs_config_file,
                                ramdisk_format=ramdisk_format)
 
@@ -1671,23 +1705,8 @@
     with open(img.name, 'ab') as f:
       f.write(boot_signature_bytes)
 
-  if (info_dict.get("boot_signer") == "true" and
-          info_dict.get("verity_key")):
-    # Hard-code the path as "/boot" for two-step special recovery image (which
-    # will be loaded into /boot during the two-step OTA).
-    if two_step_image:
-      path = "/boot"
-    else:
-      path = "/" + partition_name
-    cmd = [OPTIONS.boot_signer_path]
-    cmd.extend(OPTIONS.boot_signer_args)
-    cmd.extend([path, img.name,
-                info_dict["verity_key"] + ".pk8",
-                info_dict["verity_key"] + ".x509.pem", img.name])
-    RunAndCheckOutput(cmd)
-
   # Sign the image if vboot is non-empty.
-  elif info_dict.get("vboot"):
+  if info_dict.get("vboot"):
     path = "/" + partition_name
     img_keyblock = tempfile.NamedTemporaryFile()
     # We have switched from the prebuilt futility binary to using the tool
@@ -1782,6 +1801,9 @@
   if info_dict.get("recovery_as_boot") == "true":
     return True  # the recovery-as-boot boot.img has a RECOVERY ramdisk.
 
+  if info_dict.get("gki_boot_image_without_ramdisk") == "true":
+    return False  # A GKI boot.img has no ramdisk since Android-13.
+
   if info_dict.get("system_root_image") == "true":
     # The ramdisk content is merged into the system.img, so there is NO
     # ramdisk in the boot.img or boot-<kernel version>.img.
@@ -1838,7 +1860,7 @@
   return None
 
 
-def _BuildVendorBootImage(sourcedir, info_dict=None):
+def _BuildVendorBootImage(sourcedir, partition_name, info_dict=None):
   """Build a vendor boot image from the specified sourcedir.
 
   Take a ramdisk, dtb, and vendor_cmdline from the input (in 'sourcedir'), and
@@ -1853,7 +1875,7 @@
 
   img = tempfile.NamedTemporaryFile()
 
-  ramdisk_format = _GetRamdiskFormat(info_dict)
+  ramdisk_format = GetRamdiskFormat(info_dict)
   ramdisk_img = _MakeRamdisk(sourcedir, ramdisk_format=ramdisk_format)
 
   # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
@@ -1863,8 +1885,14 @@
 
   fn = os.path.join(sourcedir, "dtb")
   if os.access(fn, os.F_OK):
-    cmd.append("--dtb")
-    cmd.append(fn)
+    has_vendor_kernel_boot = (info_dict.get(
+        "vendor_kernel_boot", "").lower() == "true")
+
+    # Pack dtb into vendor_kernel_boot if building vendor_kernel_boot.
+    # Otherwise pack dtb into vendor_boot.
+    if not has_vendor_kernel_boot or partition_name == "vendor_kernel_boot":
+      cmd.append("--dtb")
+      cmd.append(fn)
 
   fn = os.path.join(sourcedir, "vendor_cmdline")
   if os.access(fn, os.F_OK):
@@ -1924,11 +1952,11 @@
   # AVB: if enabled, calculate and add hash.
   if info_dict.get("avb_enable") == "true":
     avbtool = info_dict["avb_avbtool"]
-    part_size = info_dict["vendor_boot_size"]
+    part_size = info_dict[f'{partition_name}_size']
     cmd = [avbtool, "add_hash_footer", "--image", img.name,
-           "--partition_size", str(part_size), "--partition_name", "vendor_boot"]
-    AppendAVBSigningArgs(cmd, "vendor_boot")
-    args = info_dict.get("avb_vendor_boot_add_hash_footer_args")
+           "--partition_size", str(part_size), "--partition_name", partition_name]
+    AppendAVBSigningArgs(cmd, partition_name)
+    args = info_dict.get(f'avb_{partition_name}_add_hash_footer_args')
     if args and args.strip():
       cmd.extend(shlex.split(args))
     RunAndCheckOutput(cmd)
@@ -1962,7 +1990,31 @@
     info_dict = OPTIONS.info_dict
 
   data = _BuildVendorBootImage(
-      os.path.join(unpack_dir, tree_subdir), info_dict)
+      os.path.join(unpack_dir, tree_subdir), "vendor_boot", info_dict)
+  if data:
+    return File(name, data)
+  return None
+
+
+def GetVendorKernelBootImage(name, prebuilt_name, unpack_dir, tree_subdir,
+                             info_dict=None):
+  """Return a File object with the desired vendor kernel boot image.
+
+  Look for it under 'unpack_dir'/IMAGES, otherwise construct it from
+  the source files in 'unpack_dir'/'tree_subdir'."""
+
+  prebuilt_path = os.path.join(unpack_dir, "IMAGES", prebuilt_name)
+  if os.path.exists(prebuilt_path):
+    logger.info("using prebuilt %s from IMAGES...", prebuilt_name)
+    return File.FromLocalFile(name, prebuilt_path)
+
+  logger.info("building image from target_files %s...", tree_subdir)
+
+  if info_dict is None:
+    info_dict = OPTIONS.info_dict
+
+  data = _BuildVendorBootImage(
+      os.path.join(unpack_dir, tree_subdir), "vendor_kernel_boot", info_dict)
   if data:
     return File(name, data)
   return None
@@ -2030,7 +2082,6 @@
 def GetUserImage(which, tmpdir, input_zip,
                  info_dict=None,
                  allow_shared_blocks=None,
-                 hashtree_info_generator=None,
                  reset_file_map=False):
   """Returns an Image object suitable for passing to BlockImageDiff.
 
@@ -2047,8 +2098,6 @@
     info_dict: The dict to be looked up for relevant info.
     allow_shared_blocks: If image is sparse, whether having shared blocks is
         allowed. If none, it is looked up from info_dict.
-    hashtree_info_generator: If present and image is sparse, generates the
-        hashtree_info for this sparse image.
     reset_file_map: If true and image is sparse, reset file map before returning
         the image.
   Returns:
@@ -2070,15 +2119,14 @@
     allow_shared_blocks = info_dict.get("ext4_share_dup_blocks") == "true"
 
   if is_sparse:
-    img = GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks,
-                         hashtree_info_generator)
+    img = GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks)
     if reset_file_map:
       img.ResetFileMap()
     return img
-  return GetNonSparseImage(which, tmpdir, hashtree_info_generator)
+  return GetNonSparseImage(which, tmpdir)
 
 
-def GetNonSparseImage(which, tmpdir, hashtree_info_generator=None):
+def GetNonSparseImage(which, tmpdir):
   """Returns a Image object suitable for passing to BlockImageDiff.
 
   This function loads the specified non-sparse image from the given path.
@@ -2096,11 +2144,10 @@
   # ota_from_target_files.py (since LMP).
   assert os.path.exists(path) and os.path.exists(mappath)
 
-  return images.FileImage(path, hashtree_info_generator=hashtree_info_generator)
+  return images.FileImage(path)
 
 
-def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks,
-                   hashtree_info_generator=None):
+def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks):
   """Returns a SparseImage object suitable for passing to BlockImageDiff.
 
   This function loads the specified sparse image from the given path, and
@@ -2113,8 +2160,6 @@
     tmpdir: The directory that contains the prebuilt image and block map file.
     input_zip: The target-files ZIP archive.
     allow_shared_blocks: Whether having shared blocks is allowed.
-    hashtree_info_generator: If present, generates the hashtree_info for this
-        sparse image.
   Returns:
     A SparseImage object, with file_map info loaded.
   """
@@ -2131,8 +2176,7 @@
   clobbered_blocks = "0"
 
   image = sparse_img.SparseImage(
-      path, mappath, clobbered_blocks, allow_shared_blocks=allow_shared_blocks,
-      hashtree_info_generator=hashtree_info_generator)
+      path, mappath, clobbered_blocks, allow_shared_blocks=allow_shared_blocks)
 
   # block.map may contain less blocks, because mke2fs may skip allocating blocks
   # if they contain all zeros. We can't reconstruct such a file from its block
@@ -2336,8 +2380,40 @@
   stdoutdata, _ = proc.communicate(password)
   if proc.returncode != 0:
     raise ExternalError(
-        "Failed to run signapk.jar: return code {}:\n{}".format(
+        "Failed to run {}: return code {}:\n{}".format(cmd,
+                                                       proc.returncode, stdoutdata))
+
+
+def SignSePolicy(sepolicy, key, password):
+  """Sign the sepolicy zip, producing an fsverity .fsv_sig and
+  an RSA .sig signature files.
+  """
+
+  if OPTIONS.sign_sepolicy_path is None:
+    logger.info("No sign_sepolicy_path specified, %s was not signed", sepolicy)
+    return False
+
+  java_library_path = os.path.join(
+      OPTIONS.search_path, OPTIONS.signapk_shared_library_path)
+
+  cmd = ([OPTIONS.java_path] + OPTIONS.java_args +
+         ["-Djava.library.path=" + java_library_path,
+          "-jar", os.path.join(OPTIONS.search_path, OPTIONS.sign_sepolicy_path)] +
+         OPTIONS.extra_sign_sepolicy_args)
+
+  cmd.extend([key + OPTIONS.public_key_suffix,
+              key + OPTIONS.private_key_suffix,
+              sepolicy, os.path.dirname(sepolicy)])
+
+  proc = Run(cmd, stdin=subprocess.PIPE)
+  if password is not None:
+    password += "\n"
+  stdoutdata, _ = proc.communicate(password)
+  if proc.returncode != 0:
+    raise ExternalError(
+        "Failed to run sign sepolicy: return code {}:\n{}".format(
             proc.returncode, stdoutdata))
+  return True
 
 
 def CheckSize(data, target, info_dict):
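
A hedged usage sketch for the new `SignSePolicy` helper: it is a no-op returning False unless `--sign_sepolicy_path` is set, returns True on success, and raises ExternalError if the signer fails. The jar path and key below are assumptions for illustration:

```python
import common

# Illustrative option values; normally set via ParseOptions flags.
common.OPTIONS.search_path = 'out/host/linux-x86'
common.OPTIONS.sign_sepolicy_path = 'framework/sign_sepolicy.jar'

signed = common.SignSePolicy(
    '/tmp/payload/etc/SEPolicy.zip',                # sepolicy zip to sign
    'build/make/target/product/security/testkey',  # key basename (.pk8/.x509.pem)
    None)                                           # no key password
```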
@@ -2515,7 +2591,8 @@
     opts, args = getopt.getopt(
         argv, "hvp:s:x:" + extra_opts,
         ["help", "verbose", "path=", "signapk_path=",
-         "signapk_shared_library_path=", "extra_signapk_args=", "aapt2_path=",
+         "signapk_shared_library_path=", "extra_signapk_args=",
+         "sign_sepolicy_path=", "extra_sign_sepolicy_args=", "aapt2_path=",
          "java_path=", "java_args=", "android_jar_path=", "public_key_suffix=",
          "private_key_suffix=", "boot_signer_path=", "boot_signer_args=",
          "verity_signer_path=", "verity_signer_args=", "device_specific=",
@@ -2539,6 +2616,10 @@
       OPTIONS.signapk_shared_library_path = a
     elif o in ("--extra_signapk_args",):
       OPTIONS.extra_signapk_args = shlex.split(a)
+    elif o in ("--sign_sepolicy_path",):
+      OPTIONS.sign_sepolicy_path = a
+    elif o in ("--extra_sign_sepolicy_args",):
+      OPTIONS.extra_sign_sepolicy_args = shlex.split(a)
     elif o in ("--aapt2_path",):
       OPTIONS.aapt2_path = a
     elif o in ("--java_path",):
@@ -2552,13 +2633,17 @@
     elif o in ("--private_key_suffix",):
       OPTIONS.private_key_suffix = a
     elif o in ("--boot_signer_path",):
-      OPTIONS.boot_signer_path = a
+      raise ValueError(
+          "--boot_signer_path is no longer supported, please switch to AVB")
     elif o in ("--boot_signer_args",):
-      OPTIONS.boot_signer_args = shlex.split(a)
+      raise ValueError(
+          "--boot_signer_args is no longer supported, please switch to AVB")
     elif o in ("--verity_signer_path",):
-      OPTIONS.verity_signer_path = a
+      raise ValueError(
+          "--verity_signer_path is no longer supported, please switch to AVB")
     elif o in ("--verity_signer_args",):
-      OPTIONS.verity_signer_args = shlex.split(a)
+      raise ValueError(
+          "--verity_signer_args is no longer supported, please switch to AVB")
     elif o in ("-s", "--device_specific"):
       OPTIONS.device_specific = a
     elif o in ("-x", "--extra"):
@@ -2712,18 +2797,6 @@
 def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
              compress_type=None):
 
-  # http://b/18015246
-  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
-  # for files larger than 2GiB. We can work around this by adjusting their
-  # limit. Note that `zipfile.writestr()` will not work for strings larger than
-  # 2GiB. The Python interpreter sometimes rejects strings that large (though
-  # it isn't clear to me exactly what circumstances cause this).
-  # `zipfile.write()` must be used directly to work around this.
-  #
-  # This mess can be avoided if we port to python3.
-  saved_zip64_limit = zipfile.ZIP64_LIMIT
-  zipfile.ZIP64_LIMIT = (1 << 32) - 1
-
   if compress_type is None:
     compress_type = zip_file.compression
   if arcname is None:
@@ -2749,14 +2822,13 @@
   finally:
     os.chmod(filename, saved_stat.st_mode)
     os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
-    zipfile.ZIP64_LIMIT = saved_zip64_limit
 
 
 def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
                 compress_type=None):
   """Wrap zipfile.writestr() function to work around the zip64 limit.
 
-  Even with the ZIP64_LIMIT workaround, it won't allow writing a string
+  Python's zip implementation won't allow writing a string
   longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
   when calling crc32(bytes).
 
@@ -2765,9 +2837,6 @@
   when we know the string won't be too long.
   """
 
-  saved_zip64_limit = zipfile.ZIP64_LIMIT
-  zipfile.ZIP64_LIMIT = (1 << 32) - 1
-
   if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
     zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
     zinfo.compress_type = zip_file.compression
@@ -2800,41 +2869,37 @@
   zinfo.date_time = (2009, 1, 1, 0, 0, 0)
 
   zip_file.writestr(zinfo, data)
-  zipfile.ZIP64_LIMIT = saved_zip64_limit
 
 
-def ZipDelete(zip_filename, entries):
+def ZipDelete(zip_filename, entries, force=False):
   """Deletes entries from a ZIP file.
 
-  Since deleting entries from a ZIP file is not supported, it shells out to
-  'zip -d'.
-
   Args:
     zip_filename: The name of the ZIP file.
     entries: The name of the entry, or the list of names to be deleted.
-
-  Raises:
-    AssertionError: In case of non-zero return from 'zip'.
   """
   if isinstance(entries, str):
     entries = [entries]
   # If list is empty, nothing to do
   if not entries:
     return
-  cmd = ["zip", "-d", zip_filename] + entries
-  RunAndCheckOutput(cmd)
 
+  with zipfile.ZipFile(zip_filename, 'r') as zin:
+    if not force and not set(zin.namelist()).intersection(entries):
+      raise ExternalError(
+          "Failed to delete zip entries, no matching names: %s" % entries)
 
-def ZipClose(zip_file):
-  # http://b/18015246
-  # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
-  # central directory.
-  saved_zip64_limit = zipfile.ZIP64_LIMIT
-  zipfile.ZIP64_LIMIT = (1 << 32) - 1
+    fd, new_zipfile = tempfile.mkstemp(dir=os.path.dirname(zip_filename))
+    os.close(fd)
 
-  zip_file.close()
+    with zipfile.ZipFile(new_zipfile, 'w') as zout:
+      for item in zin.infolist():
+        if item.filename in entries:
+          continue
+        buffer = zin.read(item.filename)
+        zout.writestr(item, buffer)
 
-  zipfile.ZIP64_LIMIT = saved_zip64_limit
+  os.replace(new_zipfile, zip_filename)
 
 
 class DeviceSpecificParams(object):
@@ -3380,7 +3445,8 @@
     "ext4": "EMMC",
     "emmc": "EMMC",
     "f2fs": "EMMC",
-    "squashfs": "EMMC"
+    "squashfs": "EMMC",
+    "erofs": "EMMC"
 }
 
 
@@ -3915,133 +3981,9 @@
     return None
 
 
-def GetCareMap(which, imgname):
-  """Returns the care_map string for the given partition.
-
-  Args:
-    which: The partition name, must be listed in PARTITIONS_WITH_CARE_MAP.
-    imgname: The filename of the image.
-
-  Returns:
-    (which, care_map_ranges): care_map_ranges is the raw string of the care_map
-    RangeSet; or None.
-  """
-  assert which in PARTITIONS_WITH_CARE_MAP
-
-  # which + "_image_size" contains the size that the actual filesystem image
-  # resides in, which is all that needs to be verified. The additional blocks in
-  # the image file contain verity metadata, by reading which would trigger
-  # invalid reads.
-  image_size = OPTIONS.info_dict.get(which + "_image_size")
-  if not image_size:
-    return None
-
-  disable_sparse = OPTIONS.info_dict.get(which + "_disable_sparse")
-
-  image_blocks = int(image_size) // 4096 - 1
-  # It's OK for image_blocks to be 0, because care map ranges are inclusive.
-  # So 0-0 means "just block 0", which is valid.
-  assert image_blocks >= 0, "blocks for {} must be non-negative, image size: {}".format(
-      which, image_size)
-
-  # For sparse images, we will only check the blocks that are listed in the care
-  # map, i.e. the ones with meaningful data.
-  if "extfs_sparse_flag" in OPTIONS.info_dict and not disable_sparse:
-    simg = sparse_img.SparseImage(imgname)
-    care_map_ranges = simg.care_map.intersect(
-        rangelib.RangeSet("0-{}".format(image_blocks)))
-
-  # Otherwise for non-sparse images, we read all the blocks in the filesystem
-  # image.
-  else:
-    care_map_ranges = rangelib.RangeSet("0-{}".format(image_blocks))
-
-  return [which, care_map_ranges.to_string_raw()]
-
-
-def AddCareMapForAbOta(output_file, ab_partitions, image_paths):
-  """Generates and adds care_map.pb for a/b partition that has care_map.
-
-  Args:
-    output_file: The output zip file (needs to be already open),
-        or file path to write care_map.pb.
-    ab_partitions: The list of A/B partitions.
-    image_paths: A map from the partition name to the image path.
-  """
-  if not output_file:
-    raise ExternalError('Expected output_file for AddCareMapForAbOta')
-
-  care_map_list = []
-  for partition in ab_partitions:
-    partition = partition.strip()
-    if partition not in PARTITIONS_WITH_CARE_MAP:
-      continue
-
-    verity_block_device = "{}_verity_block_device".format(partition)
-    avb_hashtree_enable = "avb_{}_hashtree_enable".format(partition)
-    if (verity_block_device in OPTIONS.info_dict or
-            OPTIONS.info_dict.get(avb_hashtree_enable) == "true"):
-      if partition not in image_paths:
-        logger.warning('Potential partition with care_map missing from images: %s',
-                       partition)
-        continue
-      image_path = image_paths[partition]
-      if not os.path.exists(image_path):
-        raise ExternalError('Expected image at path {}'.format(image_path))
-
-      care_map = GetCareMap(partition, image_path)
-      if not care_map:
-        continue
-      care_map_list += care_map
-
-      # adds fingerprint field to the care_map
-      # TODO(xunchang) revisit the fingerprint calculation for care_map.
-      partition_props = OPTIONS.info_dict.get(partition + ".build.prop")
-      prop_name_list = ["ro.{}.build.fingerprint".format(partition),
-                        "ro.{}.build.thumbprint".format(partition)]
-
-      present_props = [x for x in prop_name_list if
-                       partition_props and partition_props.GetProp(x)]
-      if not present_props:
-        logger.warning(
-            "fingerprint is not present for partition %s", partition)
-        property_id, fingerprint = "unknown", "unknown"
-      else:
-        property_id = present_props[0]
-        fingerprint = partition_props.GetProp(property_id)
-      care_map_list += [property_id, fingerprint]
-
-  if not care_map_list:
-    return
-
-  # Converts the list into proto buf message by calling care_map_generator; and
-  # writes the result to a temp file.
-  temp_care_map_text = MakeTempFile(prefix="caremap_text-",
-                                           suffix=".txt")
-  with open(temp_care_map_text, 'w') as text_file:
-    text_file.write('\n'.join(care_map_list))
-
-  temp_care_map = MakeTempFile(prefix="caremap-", suffix=".pb")
-  care_map_gen_cmd = ["care_map_generator", temp_care_map_text, temp_care_map]
-  RunAndCheckOutput(care_map_gen_cmd)
-
-  if not isinstance(output_file, zipfile.ZipFile):
-    shutil.copy(temp_care_map, output_file)
-    return
-  # output_file is a zip file
-  care_map_path = "META/care_map.pb"
-  if care_map_path in output_file.namelist():
-    # Copy the temp file into the OPTIONS.input_tmp dir and update the
-    # replace_updated_files_list used by add_img_to_target_files
-    if not OPTIONS.replace_updated_files_list:
-      OPTIONS.replace_updated_files_list = []
-    shutil.copy(temp_care_map, os.path.join(OPTIONS.input_tmp, care_map_path))
-    OPTIONS.replace_updated_files_list.append(care_map_path)
-  else:
-    ZipWrite(output_file, temp_care_map, arcname=care_map_path)
-
-
 def IsSparseImage(filepath):
+  if not os.path.exists(filepath):
+    return False
   with open(filepath, 'rb') as fp:
     # Magic for android sparse image format
     # https://source.android.com/devices/bootloader/images
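
The rewritten `ZipDelete` above drops entries by rewriting the archive in pure Python instead of shelling out to `zip -d`. A usage sketch (file names are illustrative):

```python
import common

# Rewrites target-files.zip in place without META/care_map.pb; raises
# ExternalError if none of the given names exist in the archive.
common.ZipDelete('target-files.zip', ['META/care_map.pb'])

# force=True skips the existence check, so missing entries are ignored.
common.ZipDelete('target-files.zip', ['OPTIONAL/maybe_absent.txt'], force=True)
```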
diff --git a/tools/releasetools/fsverity_manifest_generator.py b/tools/releasetools/fsverity_manifest_generator.py
deleted file mode 100644
index b8184bc..0000000
--- a/tools/releasetools/fsverity_manifest_generator.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright 2022 Google Inc. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-`fsverity_manifest_generator` generates build manifest APK file containing
-digests of target files. The APK file is signed so the manifest inside the APK
-can be trusted.
-"""
-
-import argparse
-import common
-import os
-import subprocess
-import sys
-from fsverity_digests_pb2 import FSVerityDigests
-
-HASH_ALGORITHM = 'sha256'
-
-def _digest(fsverity_path, input_file):
-  cmd = [fsverity_path, 'digest', input_file]
-  cmd.extend(['--compact'])
-  cmd.extend(['--hash-alg', HASH_ALGORITHM])
-  out = subprocess.check_output(cmd, universal_newlines=True).strip()
-  return bytes(bytearray.fromhex(out))
-
-if __name__ == '__main__':
-  p = argparse.ArgumentParser()
-  p.add_argument(
-      '--output',
-      help='Path to the output manifest APK',
-      required=True)
-  p.add_argument(
-      '--fsverity-path',
-      help='path to the fsverity program',
-      required=True)
-  p.add_argument(
-      '--aapt2-path',
-      help='path to the aapt2 program',
-      required=True)
-  p.add_argument(
-      '--min-sdk-version',
-      help='minimum supported sdk version of the generated manifest apk',
-      required=True)
-  p.add_argument(
-      '--version-code',
-      help='version code for the generated manifest apk',
-      required=True)
-  p.add_argument(
-      '--version-name',
-      help='version name for the generated manifest apk',
-      required=True)
-  p.add_argument(
-      '--framework-res',
-      help='path to framework-res.apk',
-      required=True)
-  p.add_argument(
-      '--apksigner-path',
-      help='path to the apksigner program',
-      required=True)
-  p.add_argument(
-      '--apk-key-path',
-      help='path to the apk key',
-      required=True)
-  p.add_argument(
-      '--apk-manifest-path',
-      help='path to AndroidManifest.xml',
-      required=True)
-  p.add_argument(
-      '--base-dir',
-      help='directory to use as a relative root for the inputs',
-      required=True)
-  p.add_argument(
-      'inputs',
-      nargs='+',
-      help='input file for the build manifest')
-  args = p.parse_args(sys.argv[1:])
-
-  digests = FSVerityDigests()
-  for f in sorted(args.inputs):
-    # f is a full path for now; make it relative so it starts with {mount_point}/
-    digest = digests.digests[os.path.relpath(f, args.base_dir)]
-    digest.digest = _digest(args.fsverity_path, f)
-    digest.hash_alg = HASH_ALGORITHM
-
-  temp_dir = common.MakeTempDir()
-
-  os.mkdir(os.path.join(temp_dir, "assets"))
-  metadata_path = os.path.join(temp_dir, "assets", "build_manifest.pb")
-  with open(metadata_path, "wb") as f:
-    f.write(digests.SerializeToString())
-
-  common.RunAndCheckOutput([args.aapt2_path, "link",
-      "-A", os.path.join(temp_dir, "assets"),
-      "-o", args.output,
-      "--min-sdk-version", args.min_sdk_version,
-      "--version-code", args.version_code,
-      "--version-name", args.version_name,
-      "-I", args.framework_res,
-      "--manifest", args.apk_manifest_path])
-  common.RunAndCheckOutput([args.apksigner_path, "sign", "--in", args.output,
-      "--cert", args.apk_key_path + ".x509.pem",
-      "--key", args.apk_key_path + ".pk8"])
diff --git a/tools/releasetools/images.py b/tools/releasetools/images.py
index a24148a..d06b979 100644
--- a/tools/releasetools/images.py
+++ b/tools/releasetools/images.py
@@ -149,7 +149,7 @@
 class FileImage(Image):
   """An image wrapped around a raw image file."""
 
-  def __init__(self, path, hashtree_info_generator=None):
+  def __init__(self, path):
     self.path = path
     self.blocksize = 4096
     self._file_size = os.path.getsize(self.path)
@@ -166,10 +166,6 @@
 
     self.generator_lock = threading.Lock()
 
-    self.hashtree_info = None
-    if hashtree_info_generator:
-      self.hashtree_info = hashtree_info_generator.Generate(self)
-
     zero_blocks = []
     nonzero_blocks = []
     reference = '\0' * self.blocksize
@@ -190,8 +186,6 @@
       self.file_map["__ZERO"] = RangeSet(data=zero_blocks)
     if nonzero_blocks:
       self.file_map["__NONZERO"] = RangeSet(data=nonzero_blocks)
-    if self.hashtree_info:
-      self.file_map["__HASHTREE"] = self.hashtree_info.hashtree_range
 
   def __del__(self):
     self._file.close()
diff --git a/tools/releasetools/img_from_target_files b/tools/releasetools/img_from_target_files
deleted file mode 120000
index afaf24b..0000000
--- a/tools/releasetools/img_from_target_files
+++ /dev/null
@@ -1 +0,0 @@
-img_from_target_files.py
\ No newline at end of file
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index 76da89c..f8bdd81 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -173,7 +173,7 @@
   logger.info('Writing super.img to archive...')
   with zipfile.ZipFile(
       output_file, 'a', compression=zipfile.ZIP_DEFLATED,
-      allowZip64=not OPTIONS.sparse_userimages) as output_zip:
+      allowZip64=True) as output_zip:
     common.ZipWrite(output_zip, super_file, 'super.img')
 
 
diff --git a/tools/releasetools/merge/OWNERS b/tools/releasetools/merge/OWNERS
index 9012e3a..0eddee2 100644
--- a/tools/releasetools/merge/OWNERS
+++ b/tools/releasetools/merge/OWNERS
@@ -1,3 +1,4 @@
-danielnorman@google.com
+deyaoren@google.com
+haamed@google.com
 jgalmes@google.com
 rseymour@google.com
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index c06fd4c..c95cead 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -149,6 +149,13 @@
 OPTIONS.vendor_dexpreopt_config = None
 
 
+def move_only_exists(source, destination):
+  """Judge whether the file exists and then move the file."""
+
+  if os.path.exists(source):
+    shutil.move(source, destination)
+
+
 def create_merged_package(temp_dir):
   """Merges two target files packages into one target files structure.
 
@@ -286,9 +293,8 @@
   shutil.move(
       os.path.join(vendor_target_files_dir, 'IMAGES', partition_img),
       os.path.join(target_files_dir, 'IMAGES', partition_img))
-  shutil.move(
-      os.path.join(vendor_target_files_dir, 'IMAGES', partition_map),
-      os.path.join(target_files_dir, 'IMAGES', partition_map))
+  move_only_exists(
+      os.path.join(vendor_target_files_dir, 'IMAGES', partition_map),
+      os.path.join(target_files_dir, 'IMAGES', partition_map))
 
   def copy_recovery_file(filename):
     for subdir in ('VENDOR', 'SYSTEM/vendor'):
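
The `move_only_exists` helper above makes the block-map move tolerant of partitions that ship no .map file (not every filesystem produces one). A minimal self-contained illustration with hypothetical paths:

```python
import os
import shutil

def move_only_exists(source, destination):
  """Moves the file to destination only if the source exists."""
  if os.path.exists(source):
    shutil.move(source, destination)

# No-op when the vendor build produced no block map for the partition.
move_only_exists('/tmp/vendor/IMAGES/vendor.map', '/tmp/out/IMAGES/vendor.map')
```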
diff --git a/tools/releasetools/merge/merge_utils.py b/tools/releasetools/merge/merge_utils.py
index f623ad2..e253b02 100644
--- a/tools/releasetools/merge/merge_utils.py
+++ b/tools/releasetools/merge/merge_utils.py
@@ -100,20 +100,16 @@
   has_error = False
 
   # Check that partitions only come from one input.
-  for partition in _FRAMEWORK_PARTITIONS.union(_VENDOR_PARTITIONS):
-    image_path = 'IMAGES/{}.img'.format(partition.lower().replace('/', ''))
-    in_framework = (
-        any(item.startswith(partition) for item in OPTIONS.framework_item_list)
-        or image_path in OPTIONS.framework_item_list)
-    in_vendor = (
-        any(item.startswith(partition) for item in OPTIONS.vendor_item_list) or
-        image_path in OPTIONS.vendor_item_list)
-    if in_framework and in_vendor:
-      logger.error(
-          'Cannot extract items from %s for both the framework and vendor'
-          ' builds. Please ensure only one merge config item list'
-          ' includes %s.', partition, partition)
-      has_error = True
+  framework_partitions = ItemListToPartitionSet(OPTIONS.framework_item_list)
+  vendor_partitions = ItemListToPartitionSet(OPTIONS.vendor_item_list)
+  from_both = framework_partitions.intersection(vendor_partitions)
+  if from_both:
+    logger.error(
+        'Cannot extract items from the same partition in both the '
+        'framework and vendor builds. Please ensure only one merge config '
+        'item list (or inferred list) includes each partition: %s' %
+        ','.join(from_both))
+    has_error = True
 
   if any([
       key in OPTIONS.framework_misc_info_keys
@@ -131,7 +127,8 @@
 # system partition). The following regex matches this and extracts the
 # partition name.
 
-_PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/\*$')
+_PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/.*$')
+_IMAGE_PARTITION_PATTERN = re.compile(r'^IMAGES/(.*)\.img$')
 
 
 def ItemListToPartitionSet(item_list):
@@ -154,62 +151,89 @@
   partition_set = set()
 
   for item in item_list:
-    partition_match = _PARTITION_ITEM_PATTERN.search(item.strip())
-    partition_tag = partition_match.group(
-        1).lower() if partition_match else None
-
-    if partition_tag:
-      partition_set.add(partition_tag)
+    for pattern in (_PARTITION_ITEM_PATTERN, _IMAGE_PARTITION_PATTERN):
+      partition_match = pattern.search(item.strip())
+      if partition_match:
+        partition = partition_match.group(1).lower()
+        # These directories in target-files are not actual partitions.
+        if partition not in ('meta', 'images'):
+          partition_set.add(partition)
 
   return partition_set
 
 
 # Partitions that are grabbed from the framework partial build by default.
 _FRAMEWORK_PARTITIONS = {
-    'system', 'product', 'system_ext', 'system_other', 'root', 'system_dlkm'
-}
-# Partitions that are grabbed from the vendor partial build by default.
-_VENDOR_PARTITIONS = {
-    'vendor', 'odm', 'oem', 'boot', 'vendor_boot', 'recovery',
-    'prebuilt_images', 'radio', 'data', 'vendor_dlkm', 'odm_dlkm'
+    'system', 'product', 'system_ext', 'system_other', 'root', 'system_dlkm',
+    'vbmeta_system'
 }
 
 
 def InferItemList(input_namelist, framework):
-  item_list = []
+  item_set = set()
 
-  # Some META items are grabbed from partial builds directly.
+  # Some META items are always grabbed from partial builds directly.
   # Others are combined in merge_meta.py.
   if framework:
-    item_list.extend([
+    item_set.update([
         'META/liblz4.so',
         'META/postinstall_config.txt',
         'META/update_engine_config.txt',
         'META/zucchini_config.txt',
     ])
   else:  # vendor
-    item_list.extend([
+    item_set.update([
         'META/kernel_configs.txt',
         'META/kernel_version.txt',
         'META/otakeys.txt',
+        'META/pack_radioimages.txt',
         'META/releasetools.py',
-        'OTA/android-info.txt',
     ])
 
   # Grab a set of items for the expected partitions in the partial build.
-  for partition in (_FRAMEWORK_PARTITIONS if framework else _VENDOR_PARTITIONS):
-    for namelist in input_namelist:
-      if namelist.startswith('%s/' % partition.upper()):
-        fs_config_prefix = '' if partition == 'system' else '%s_' % partition
-        item_list.extend([
-            '%s/*' % partition.upper(),
-            'IMAGES/%s.img' % partition,
-            'IMAGES/%s.map' % partition,
-            'META/%sfilesystem_config.txt' % fs_config_prefix,
-        ])
-        break
+  seen_partitions = []
+  for namelist in input_namelist:
+    if namelist.endswith('/'):
+      continue
 
-  return sorted(item_list)
+    partition = namelist.split('/')[0].lower()
+
+    # META items are grabbed above, or merged later.
+    if partition == 'meta':
+      continue
+
+    if partition == 'images':
+      image_partition, extension = os.path.splitext(os.path.basename(namelist))
+      if image_partition == 'vbmeta':
+        # Always regenerate vbmeta.img since it depends on hash information
+        # from both builds.
+        continue
+      if extension in ('.img', '.map'):
+        # Include image files in IMAGES/* if the partition comes from
+        # the expected set.
+        if (framework and image_partition in _FRAMEWORK_PARTITIONS) or (
+            not framework and image_partition not in _FRAMEWORK_PARTITIONS):
+          item_set.add(namelist)
+      elif not framework:
+        # Include all miscellaneous non-image files in IMAGES/* from
+        # the vendor build.
+        item_set.add(namelist)
+      continue
+
+    # Skip already-visited partitions.
+    if partition in seen_partitions:
+      continue
+    seen_partitions.append(partition)
+
+    if (framework and partition in _FRAMEWORK_PARTITIONS) or (
+        not framework and partition not in _FRAMEWORK_PARTITIONS):
+      fs_config_prefix = '' if partition == 'system' else '%s_' % partition
+      item_set.update([
+          '%s/*' % partition.upper(),
+          'META/%sfilesystem_config.txt' % fs_config_prefix,
+      ])
+
+  return sorted(item_set)
 
 
 def InferFrameworkMiscInfoKeys(input_namelist):
@@ -223,8 +247,8 @@
   ]
 
   for partition in _FRAMEWORK_PARTITIONS:
-    for namelist in input_namelist:
-      if namelist.startswith('%s/' % partition.upper()):
+    for partition_dir in ('%s/' % partition.upper(), 'SYSTEM/%s/' % partition):
+      if partition_dir in input_namelist:
         fs_type_prefix = '' if partition == 'system' else '%s_' % partition
         keys.extend([
             'avb_%s_hashtree_enable' % partition,
diff --git a/tools/releasetools/merge/test_merge_utils.py b/tools/releasetools/merge/test_merge_utils.py
index 1949050..eceb734 100644
--- a/tools/releasetools/merge/test_merge_utils.py
+++ b/tools/releasetools/merge/test_merge_utils.py
@@ -108,20 +108,27 @@
 
   def test_ItemListToPartitionSet(self):
     item_list = [
+        'IMAGES/system_ext.img',
         'META/apexkeys.txt',
         'META/apkcerts.txt',
         'META/filesystem_config.txt',
         'PRODUCT/*',
         'SYSTEM/*',
-        'SYSTEM_EXT/*',
+        'SYSTEM/system_ext/*',
     ]
     partition_set = merge_utils.ItemListToPartitionSet(item_list)
     self.assertEqual(set(['product', 'system', 'system_ext']), partition_set)
 
   def test_InferItemList_Framework(self):
     zip_namelist = [
+        'IMAGES/product.img',
+        'IMAGES/product.map',
+        'IMAGES/system.img',
+        'IMAGES/system.map',
         'SYSTEM/my_system_file',
         'PRODUCT/my_product_file',
+        # Device does not use a separate system_ext partition.
+        'SYSTEM/system_ext/system_ext_file',
     ]
 
     item_list = merge_utils.InferItemList(zip_namelist, framework=True)
@@ -147,37 +154,55 @@
     zip_namelist = [
         'VENDOR/my_vendor_file',
         'ODM/my_odm_file',
+        'IMAGES/odm.img',
+        'IMAGES/odm.map',
+        'IMAGES/vendor.img',
+        'IMAGES/vendor.map',
+        'IMAGES/my_custom_image.img',
+        'IMAGES/my_custom_file.txt',
+        'IMAGES/vbmeta.img',
+        'CUSTOM_PARTITION/my_custom_file',
+        # Leftover framework pieces that shouldn't be grabbed.
+        'IMAGES/system.img',
+        'SYSTEM/system_file',
     ]
 
     item_list = merge_utils.InferItemList(zip_namelist, framework=False)
 
     expected_vendor_item_list = [
+        'CUSTOM_PARTITION/*',
+        'IMAGES/my_custom_file.txt',
+        'IMAGES/my_custom_image.img',
         'IMAGES/odm.img',
         'IMAGES/odm.map',
         'IMAGES/vendor.img',
         'IMAGES/vendor.map',
+        'META/custom_partition_filesystem_config.txt',
         'META/kernel_configs.txt',
         'META/kernel_version.txt',
         'META/odm_filesystem_config.txt',
         'META/otakeys.txt',
+        'META/pack_radioimages.txt',
         'META/releasetools.py',
         'META/vendor_filesystem_config.txt',
         'ODM/*',
-        'OTA/android-info.txt',
         'VENDOR/*',
     ]
     self.assertEqual(item_list, expected_vendor_item_list)
 
   def test_InferFrameworkMiscInfoKeys(self):
     zip_namelist = [
-        'SYSTEM/my_system_file',
-        'SYSTEM_EXT/my_system_ext_file',
+        'PRODUCT/',
+        'SYSTEM/',
+        'SYSTEM/system_ext/',
     ]
 
     keys = merge_utils.InferFrameworkMiscInfoKeys(zip_namelist)
 
     expected_keys = [
         'ab_update',
+        'avb_product_add_hashtree_footer_args',
+        'avb_product_hashtree_enable',
         'avb_system_add_hashtree_footer_args',
         'avb_system_ext_add_hashtree_footer_args',
         'avb_system_ext_hashtree_enable',
@@ -186,10 +211,13 @@
         'avb_vbmeta_system_algorithm',
         'avb_vbmeta_system_key_path',
         'avb_vbmeta_system_rollback_index_location',
+        'building_product_image',
         'building_system_ext_image',
         'building_system_image',
         'default_system_dev_certificate',
         'fs_type',
+        'product_disable_sparse',
+        'product_fs_type',
         'system_disable_sparse',
         'system_ext_disable_sparse',
         'system_ext_fs_type',
diff --git a/tools/releasetools/merge_ota.py b/tools/releasetools/merge_ota.py
new file mode 100644
index 0000000..7d3d3a3
--- /dev/null
+++ b/tools/releasetools/merge_ota.py
@@ -0,0 +1,262 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import argparse
+import logging
+import os
+import struct
+import sys
+import tempfile
+import zipfile
+from typing import BinaryIO, List
+
+import care_map_pb2
+import common
+import update_payload
+from ota_metadata_pb2 import OtaMetadata
+from ota_utils import METADATA_PROTO_NAME, FinalizeMetadata, PayloadGenerator
+from payload_signer import PayloadSigner
+from update_metadata_pb2 import (DeltaArchiveManifest, DynamicPartitionGroup,
+                                 DynamicPartitionMetadata)
+from update_payload import Payload
+
+logger = logging.getLogger(__name__)
+
+CARE_MAP_ENTRY = "care_map.pb"
+
+
+def WriteDataBlob(payload: Payload, outfp: BinaryIO, read_size=1024*64):
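+  # Stream the payload's raw data blob in read_size chunks so that large
+  # payloads never have to be held in memory all at once.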
+  for i in range(0, payload.total_data_length, read_size):
+    blob = payload.ReadDataBlob(
+        i, min(i+read_size, payload.total_data_length)-i)
+    outfp.write(blob)
+
+
+def ConcatBlobs(payloads: List[Payload], outfp: BinaryIO):
+  for payload in payloads:
+    WriteDataBlob(payload, outfp)
+
+
+def TotalDataLength(partitions):
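+  # Scan the operations in reverse for the last one that carries data; its
+  # end offset equals the total length of the payload's data blob.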
+  for partition in reversed(partitions):
+    for op in reversed(partition.operations):
+      if op.data_length > 0:
+        return op.data_offset + op.data_length
+  return 0
+
+
+def ExtendPartitionUpdates(partitions, new_partitions):
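+  # Data offsets in the appended operations are relative to their own
+  # payload's blob, so rebase them past the blob data already accumulated.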
+  prefix_blob_length = TotalDataLength(partitions)
+  partitions.extend(new_partitions)
+  for part in partitions[-len(new_partitions):]:
+    for op in part.operations:
+      if op.HasField("data_length") and op.data_length != 0:
+        op.data_offset += prefix_blob_length
+
+
+class DuplicatePartitionError(ValueError):
+  pass
+
+
+def MergeDynamicPartitionGroups(groups: List[DynamicPartitionGroup], new_groups: List[DynamicPartitionGroup]):
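+  # Merge groups by name: partition lists are concatenated (and must be
+  # disjoint), and the group size becomes the larger of the two.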
+  new_groups = {new_group.name: new_group for new_group in new_groups}
+  for group in groups:
+    if group.name not in new_groups:
+      continue
+    new_group = new_groups[group.name]
+    common_partitions = set(group.partition_names).intersection(
+        set(new_group.partition_names))
+    if len(common_partitions) != 0:
+      raise DuplicatePartitionError(
+          f"Old group and new group should not have any intersections, {group.partition_names}, {new_group.partition_names}, common partitions: {common_partitions}")
+    group.partition_names.extend(new_group.partition_names)
+    group.size = max(new_group.size, group.size)
+    del new_groups[group.name]
+  for new_group in new_groups.values():
+    groups.append(new_group)
+
+
+def MergeDynamicPartitionMetadata(metadata: DynamicPartitionMetadata, new_metadata: DynamicPartitionMetadata):
+  MergeDynamicPartitionGroups(metadata.groups, new_metadata.groups)
+  metadata.snapshot_enabled &= new_metadata.snapshot_enabled
+  metadata.vabc_enabled &= new_metadata.vabc_enabled
+  assert metadata.vabc_compression_param == new_metadata.vabc_compression_param, f"{metadata.vabc_compression_param} vs. {new_metadata.vabc_compression_param}"
+  metadata.cow_version = max(metadata.cow_version, new_metadata.cow_version)
+
+
+def MergeManifests(payloads: List[Payload]) -> DeltaArchiveManifest:
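+  # Combine the per-OTA manifests into a single partial-update manifest:
+  # block sizes must agree, minor_version/max_timestamp take the maximum,
+  # and partition updates are concatenated with rebased data offsets.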
+  if len(payloads) == 0:
+    return None
+  if len(payloads) == 1:
+    return payloads[0].manifest
+
+  output_manifest = DeltaArchiveManifest()
+  output_manifest.block_size = payloads[0].manifest.block_size
+  output_manifest.partial_update = True
+  output_manifest.dynamic_partition_metadata.snapshot_enabled = payloads[
+      0].manifest.dynamic_partition_metadata.snapshot_enabled
+  output_manifest.dynamic_partition_metadata.vabc_enabled = payloads[
+      0].manifest.dynamic_partition_metadata.vabc_enabled
+  output_manifest.dynamic_partition_metadata.vabc_compression_param = payloads[
+      0].manifest.dynamic_partition_metadata.vabc_compression_param
+  apex_info = {}
+  for payload in payloads:
+    manifest = payload.manifest
+    assert manifest.block_size == output_manifest.block_size
+    output_manifest.minor_version = max(
+        output_manifest.minor_version, manifest.minor_version)
+    output_manifest.max_timestamp = max(
+        output_manifest.max_timestamp, manifest.max_timestamp)
+    # Collect apex_info keyed by package name; the merged manifest's
+    # apex_info is populated from this dict (deduplicated) below.
+    for apex in manifest.apex_info:
+      apex_info[apex.package_name] = apex
+    ExtendPartitionUpdates(output_manifest.partitions, manifest.partitions)
+    try:
+      MergeDynamicPartitionMetadata(
+        output_manifest.dynamic_partition_metadata, manifest.dynamic_partition_metadata)
+    except DuplicatePartitionError:
+      logger.error(
+          "OTA %s updates a partition that an earlier OTA already updates",
+          payload.name)
+      raise
+
+  for apex_name in sorted(apex_info.keys()):
+    output_manifest.apex_info.append(apex_info[apex_name])
+
+  return output_manifest
+
+
+def MergePayloads(payloads: List[Payload]):
+  with tempfile.NamedTemporaryFile(prefix="payload_blob") as tmpfile:
+    ConcatBlobs(payloads, tmpfile)
+
+
+def MergeCareMap(paths: List[str]):
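+  # Concatenate the care_map partitions from every input OTA that ships one;
+  # returns empty bytes if no input carries a care map.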
+  care_map = care_map_pb2.CareMap()
+  for path in paths:
+    with zipfile.ZipFile(path, "r", allowZip64=True) as zfp:
+      if CARE_MAP_ENTRY in zfp.namelist():
+        care_map_bytes = zfp.read(CARE_MAP_ENTRY)
+        partial_care_map = care_map_pb2.CareMap()
+        partial_care_map.ParseFromString(care_map_bytes)
+        care_map.partitions.extend(partial_care_map.partitions)
+  if len(care_map.partitions) == 0:
+    return b""
+  return care_map.SerializeToString()
+
+
+def WriteHeaderAndManifest(manifest: DeltaArchiveManifest, fp: BinaryIO):
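+  # Payload header layout (see update_metadata.proto): 4-byte magic "CrAU",
+  # uint64 major version, uint64 manifest size, and a uint32 metadata
+  # signature size, which is 0 here because the payload is still unsigned.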
+  __MAGIC = b"CrAU"
+  __MAJOR_VERSION = 2
+  manifest_bytes = manifest.SerializeToString()
+  fp.write(struct.pack(">4sQQL", __MAGIC,
+           __MAJOR_VERSION, len(manifest_bytes), 0))
+  fp.write(manifest_bytes)
+
+
+def AddOtaMetadata(input_ota, metadata_ota, output_ota, package_key, pw):
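+  # Reuse the OTA metadata proto from metadata_ota for the merged package,
+  # then finalize (and, if a key is given, sign) the output.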
+  with zipfile.ZipFile(metadata_ota, 'r') as zfp:
+    metadata = OtaMetadata()
+    metadata.ParseFromString(zfp.read(METADATA_PROTO_NAME))
+    FinalizeMetadata(metadata, input_ota, output_ota,
+                     package_key=package_key, pw=pw)
+    return output_ota
+
+
+def CheckOutput(output_ota):
+  payload = update_payload.Payload(output_ota)
+  payload.CheckOpDataHash()
+
+
+def CheckDuplicatePartitions(payloads: List[Payload]):
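+  # Each dynamic partition may be updated by at most one input OTA; fail
+  # early if two payloads claim the same partition.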
+  partition_to_ota = {}
+  for payload in payloads:
+    for group in payload.manifest.dynamic_partition_metadata.groups:
+      for part in group.partition_names:
+        if part in partition_to_ota:
+          raise DuplicatePartitionError(
+              f"OTA {partition_to_ota[part].name} and {payload.name} have duplicating partition {part}")
+        partition_to_ota[part] = payload
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(description='Merge multiple partial OTAs')
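+  # Typical invocation (paths are illustrative):
+  #   merge_ota.py ota_1.zip ota_2.zip --output merged.zip \
+  #       --package_key path/to/key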
+  parser.add_argument('packages', type=str, nargs='+',
+                      help='Paths to OTA packages to merge')
+  parser.add_argument('--package_key', type=str,
+                      help='Path to private key for signing payload')
+  parser.add_argument('--search_path', type=str,
+                      help='Search path for framework/signapk.jar')
+  parser.add_argument('--output', type=str,
+                      help='Path to output merged OTA', required=True)
+  parser.add_argument('--metadata_ota', type=str,
+                      help='The output zip will use build metadata from this OTA package; '
+                      'if unspecified, the last OTA package in the merge list is used')
+  parser.add_argument('--private_key_suffix', type=str,
+                      help='Suffix to be appended to package_key path', default=".pk8")
+  parser.add_argument('-v', action="store_true", help="Enable verbose logging", dest="verbose")
+  args = parser.parse_args(argv[1:])
+  file_paths = args.packages
+
+  common.OPTIONS.verbose = args.verbose
+  if args.verbose:
+    logger.setLevel(logging.INFO)
+
+  logger.info(args)
+  if args.search_path:
+    common.OPTIONS.search_path = args.search_path
+
+  metadata_ota = args.packages[-1]
+  if args.metadata_ota is not None:
+    metadata_ota = args.metadata_ota
+    assert os.path.exists(metadata_ota)
+
+  payloads = [Payload(path) for path in file_paths]
+
+  CheckDuplicatePartitions(payloads)
+
+  merged_manifest = MergeManifests(payloads)
+
+  with tempfile.NamedTemporaryFile() as unsigned_payload:
+    WriteHeaderAndManifest(merged_manifest, unsigned_payload)
+    ConcatBlobs(payloads, unsigned_payload)
+    unsigned_payload.flush()
+
+    generator = PayloadGenerator()
+    generator.payload_file = unsigned_payload.name
+    logger.info("Payload size: %d", os.path.getsize(generator.payload_file))
+
+    if args.package_key:
+      logger.info("Signing payload...")
+      signer = PayloadSigner(args.package_key, args.private_key_suffix)
+      generator.Sign(signer)
+
+    logger.info("Payload size: %d", os.path.getsize(generator.payload_file))
+
+    logger.info("Writing to %s", args.output)
+    key_passwords = common.GetKeyPasswords([args.package_key])
+    with tempfile.NamedTemporaryFile(prefix="signed_ota", suffix=".zip") as signed_ota:
+      with zipfile.ZipFile(signed_ota, "w") as zfp:
+        generator.WriteToZip(zfp)
+        care_map_bytes = MergeCareMap(args.packages)
+        if care_map_bytes:
+          zfp.writestr(CARE_MAP_ENTRY, care_map_bytes)
+      AddOtaMetadata(signed_ota.name, metadata_ota,
+                     args.output, args.package_key, key_passwords[args.package_key])
+  return 0
+
+
+if __name__ == '__main__':
+  logging.basicConfig()
+  sys.exit(main(sys.argv))
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 9732cda..ac85aa4 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -40,12 +40,9 @@
                                         info_dict=source_info,
                                         allow_shared_blocks=allow_shared_blocks)
 
-    hashtree_info_generator = verity_utils.CreateHashtreeInfoGenerator(
-        name, 4096, target_info)
     partition_tgt = common.GetUserImage(name, OPTIONS.target_tmp, target_zip,
                                         info_dict=target_info,
-                                        allow_shared_blocks=allow_shared_blocks,
-                                        hashtree_info_generator=hashtree_info_generator)
+                                        allow_shared_blocks=allow_shared_blocks)
 
     # Check the first block of the source system partition for remount R/W only
     # if the filesystem is ext4.
@@ -280,7 +277,7 @@
 
   # We haven't written the metadata entry, which will be done in
   # FinalizeMetadata.
-  common.ZipClose(output_zip)
+  output_zip.close()
 
   needed_property_files = (
       NonAbOtaPropertyFiles(),
@@ -534,7 +531,7 @@
 
   # We haven't written the metadata entry yet, which will be handled in
   # FinalizeMetadata().
-  common.ZipClose(output_zip)
+  output_zip.close()
 
   # Sign the generated zip package unless no_signing is specified.
   needed_property_files = (
diff --git a/tools/releasetools/ota_from_target_files b/tools/releasetools/ota_from_target_files
deleted file mode 120000
index 6755a90..0000000
--- a/tools/releasetools/ota_from_target_files
+++ /dev/null
@@ -1 +0,0 @@
-ota_from_target_files.py
\ No newline at end of file
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 522d489..60e95ad 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -244,6 +244,9 @@
 
   --vabc_compression_param
       Compression algorithm to be used for VABC. Available options: gz, brotli, none
+
+  --security_patch_level
+      Override the security patch level in target files
 """
 
 from __future__ import print_function
@@ -255,7 +258,6 @@
 import re
 import shlex
 import shutil
-import struct
 import subprocess
 import sys
 import zipfile
@@ -264,11 +266,12 @@
 import common
 import ota_utils
 from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
-                       PropertyFiles, SECURITY_PATCH_LEVEL_PROP_NAME, GetZipEntryOffset)
+                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME)
 from common import IsSparseImage
 import target_files_diff
 from check_target_files_vintf import CheckVintfIfTrebleEnabled
 from non_ab_ota import GenerateNonAbOtaPackage
+from payload_signer import PayloadSigner
 
 if sys.hexversion < 0x02070000:
   print("Python 2.7 or newer is required.", file=sys.stderr)
@@ -316,6 +319,7 @@
 OPTIONS.enable_zucchini = True
 OPTIONS.enable_lz4diff = False
 OPTIONS.vabc_compression_param = None
+OPTIONS.security_patch_level = None
 
 POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
 DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -335,207 +339,6 @@
     'vendor', 'vendor_boot']
 
 
-class PayloadSigner(object):
-  """A class that wraps the payload signing works.
-
-  When generating a Payload, hashes of the payload and metadata files will be
-  signed with the device key, either by calling an external payload signer or
-  by calling openssl with the package key. This class provides a unified
-  interface, so that callers can just call PayloadSigner.Sign().
-
-  If an external payload signer has been specified (OPTIONS.payload_signer), it
-  calls the signer with the provided args (OPTIONS.payload_signer_args). Note
-  that the signing key should be provided as part of the payload_signer_args.
-  Otherwise without an external signer, it uses the package key
-  (OPTIONS.package_key) and calls openssl for the signing works.
-  """
-
-  def __init__(self):
-    if OPTIONS.payload_signer is None:
-      # Prepare the payload signing key.
-      private_key = OPTIONS.package_key + OPTIONS.private_key_suffix
-      pw = OPTIONS.key_passwords[OPTIONS.package_key]
-
-      cmd = ["openssl", "pkcs8", "-in", private_key, "-inform", "DER"]
-      cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
-      signing_key = common.MakeTempFile(prefix="key-", suffix=".key")
-      cmd.extend(["-out", signing_key])
-      common.RunAndCheckOutput(cmd, verbose=False)
-
-      self.signer = "openssl"
-      self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
-                          "-pkeyopt", "digest:sha256"]
-      self.maximum_signature_size = self._GetMaximumSignatureSizeInBytes(
-          signing_key)
-    else:
-      self.signer = OPTIONS.payload_signer
-      self.signer_args = OPTIONS.payload_signer_args
-      if OPTIONS.payload_signer_maximum_signature_size:
-        self.maximum_signature_size = int(
-            OPTIONS.payload_signer_maximum_signature_size)
-      else:
-        # The legacy config uses RSA2048 keys.
-        logger.warning("The maximum signature size for payload signer is not"
-                       " set, default to 256 bytes.")
-        self.maximum_signature_size = 256
-
-  @staticmethod
-  def _GetMaximumSignatureSizeInBytes(signing_key):
-    out_signature_size_file = common.MakeTempFile("signature_size")
-    cmd = ["delta_generator", "--out_maximum_signature_size_file={}".format(
-        out_signature_size_file), "--private_key={}".format(signing_key)]
-    common.RunAndCheckOutput(cmd)
-    with open(out_signature_size_file) as f:
-      signature_size = f.read().rstrip()
-    logger.info("%s outputs the maximum signature size: %s", cmd[0],
-                signature_size)
-    return int(signature_size)
-
-  def Sign(self, in_file):
-    """Signs the given input file. Returns the output filename."""
-    out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
-    cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
-    common.RunAndCheckOutput(cmd)
-    return out_file
-
-
-class Payload(object):
-  """Manages the creation and the signing of an A/B OTA Payload."""
-
-  PAYLOAD_BIN = 'payload.bin'
-  PAYLOAD_PROPERTIES_TXT = 'payload_properties.txt'
-  SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
-  SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
-
-  def __init__(self, secondary=False):
-    """Initializes a Payload instance.
-
-    Args:
-      secondary: Whether it's generating a secondary payload (default: False).
-    """
-    self.payload_file = None
-    self.payload_properties = None
-    self.secondary = secondary
-
-  def _Run(self, cmd):  # pylint: disable=no-self-use
-    # Don't pipe (buffer) the output if verbose is set. Let
-    # brillo_update_payload write to stdout/stderr directly, so its progress can
-    # be monitored.
-    if OPTIONS.verbose:
-      common.RunAndCheckOutput(cmd, stdout=None, stderr=None)
-    else:
-      common.RunAndCheckOutput(cmd)
-
-  def Generate(self, target_file, source_file=None, additional_args=None):
-    """Generates a payload from the given target-files zip(s).
-
-    Args:
-      target_file: The filename of the target build target-files zip.
-      source_file: The filename of the source build target-files zip; or None if
-          generating a full OTA.
-      additional_args: A list of additional args that should be passed to
-          brillo_update_payload script; or None.
-    """
-    if additional_args is None:
-      additional_args = []
-
-    payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
-    cmd = ["brillo_update_payload", "generate",
-           "--payload", payload_file,
-           "--target_image", target_file]
-    if source_file is not None:
-      cmd.extend(["--source_image", source_file])
-      if OPTIONS.disable_fec_computation:
-        cmd.extend(["--disable_fec_computation", "true"])
-      if OPTIONS.disable_verity_computation:
-        cmd.extend(["--disable_verity_computation", "true"])
-    cmd.extend(additional_args)
-    self._Run(cmd)
-
-    self.payload_file = payload_file
-    self.payload_properties = None
-
-  def Sign(self, payload_signer):
-    """Generates and signs the hashes of the payload and metadata.
-
-    Args:
-      payload_signer: A PayloadSigner() instance that serves the signing work.
-
-    Raises:
-      AssertionError: On any failure when calling brillo_update_payload script.
-    """
-    assert isinstance(payload_signer, PayloadSigner)
-
-    # 1. Generate hashes of the payload and metadata files.
-    payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
-    metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
-    cmd = ["brillo_update_payload", "hash",
-           "--unsigned_payload", self.payload_file,
-           "--signature_size", str(payload_signer.maximum_signature_size),
-           "--metadata_hash_file", metadata_sig_file,
-           "--payload_hash_file", payload_sig_file]
-    self._Run(cmd)
-
-    # 2. Sign the hashes.
-    signed_payload_sig_file = payload_signer.Sign(payload_sig_file)
-    signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
-
-    # 3. Insert the signatures back into the payload file.
-    signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
-                                              suffix=".bin")
-    cmd = ["brillo_update_payload", "sign",
-           "--unsigned_payload", self.payload_file,
-           "--payload", signed_payload_file,
-           "--signature_size", str(payload_signer.maximum_signature_size),
-           "--metadata_signature_file", signed_metadata_sig_file,
-           "--payload_signature_file", signed_payload_sig_file]
-    self._Run(cmd)
-
-    # 4. Dump the signed payload properties.
-    properties_file = common.MakeTempFile(prefix="payload-properties-",
-                                          suffix=".txt")
-    cmd = ["brillo_update_payload", "properties",
-           "--payload", signed_payload_file,
-           "--properties_file", properties_file]
-    self._Run(cmd)
-
-    if self.secondary:
-      with open(properties_file, "a") as f:
-        f.write("SWITCH_SLOT_ON_REBOOT=0\n")
-
-    if OPTIONS.wipe_user_data:
-      with open(properties_file, "a") as f:
-        f.write("POWERWASH=1\n")
-
-    self.payload_file = signed_payload_file
-    self.payload_properties = properties_file
-
-  def WriteToZip(self, output_zip):
-    """Writes the payload to the given zip.
-
-    Args:
-      output_zip: The output ZipFile instance.
-    """
-    assert self.payload_file is not None
-    assert self.payload_properties is not None
-
-    if self.secondary:
-      payload_arcname = Payload.SECONDARY_PAYLOAD_BIN
-      payload_properties_arcname = Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT
-    else:
-      payload_arcname = Payload.PAYLOAD_BIN
-      payload_properties_arcname = Payload.PAYLOAD_PROPERTIES_TXT
-
-    # Add the signed payload file and properties into the zip. In order to
-    # support streaming, we pack them as ZIP_STORED. So these entries can be
-    # read directly with the offset and length pairs.
-    common.ZipWrite(output_zip, self.payload_file, arcname=payload_arcname,
-                    compress_type=zipfile.ZIP_STORED)
-    common.ZipWrite(output_zip, self.payload_properties,
-                    arcname=payload_properties_arcname,
-                    compress_type=zipfile.ZIP_STORED)
-
-
 def _LoadOemDicts(oem_source):
   """Returns the list of loaded OEM properties dict."""
   if not oem_source:
@@ -547,113 +350,6 @@
   return oem_dicts
 
 
-class StreamingPropertyFiles(PropertyFiles):
-  """A subclass for computing the property-files for streaming A/B OTAs."""
-
-  def __init__(self):
-    super(StreamingPropertyFiles, self).__init__()
-    self.name = 'ota-streaming-property-files'
-    self.required = (
-        # payload.bin and payload_properties.txt must exist.
-        'payload.bin',
-        'payload_properties.txt',
-    )
-    self.optional = (
-        # apex_info.pb isn't directly used in the update flow
-        'apex_info.pb',
-        # care_map is available only if dm-verity is enabled.
-        'care_map.pb',
-        'care_map.txt',
-        # compatibility.zip is available only if target supports Treble.
-        'compatibility.zip',
-    )
-
-
-class AbOtaPropertyFiles(StreamingPropertyFiles):
-  """The property-files for A/B OTA that includes payload_metadata.bin info.
-
-  Since P, we expose one more token (aka property-file), in addition to the ones
-  for streaming A/B OTA, for a virtual entry of 'payload_metadata.bin'.
-  'payload_metadata.bin' is the header part of a payload ('payload.bin'), which
-  doesn't exist as a separate ZIP entry, but can be used to verify if the
-  payload can be applied on the given device.
-
-  For backward compatibility, we keep both of the 'ota-streaming-property-files'
-  and the newly added 'ota-property-files' in P. The new token will only be
-  available in 'ota-property-files'.
-  """
-
-  def __init__(self):
-    super(AbOtaPropertyFiles, self).__init__()
-    self.name = 'ota-property-files'
-
-  def _GetPrecomputed(self, input_zip):
-    offset, size = self._GetPayloadMetadataOffsetAndSize(input_zip)
-    return ['payload_metadata.bin:{}:{}'.format(offset, size)]
-
-  @staticmethod
-  def _GetPayloadMetadataOffsetAndSize(input_zip):
-    """Computes the offset and size of the payload metadata for a given package.
-
-    (From system/update_engine/update_metadata.proto)
-    A delta update file contains all the deltas needed to update a system from
-    one specific version to another specific version. The update format is
-    represented by this struct pseudocode:
-
-    struct delta_update_file {
-      char magic[4] = "CrAU";
-      uint64 file_format_version;
-      uint64 manifest_size;  // Size of protobuf DeltaArchiveManifest
-
-      // Only present if format_version > 1:
-      uint32 metadata_signature_size;
-
-      // The Bzip2 compressed DeltaArchiveManifest
-      char manifest[metadata_signature_size];
-
-      // The signature of the metadata (from the beginning of the payload up to
-      // this location, not including the signature itself). This is a
-      // serialized Signatures message.
-      char medatada_signature_message[metadata_signature_size];
-
-      // Data blobs for files, no specific format. The specific offset
-      // and length of each data blob is recorded in the DeltaArchiveManifest.
-      struct {
-        char data[];
-      } blobs[];
-
-      // These two are not signed:
-      uint64 payload_signatures_message_size;
-      char payload_signatures_message[];
-    };
-
-    'payload-metadata.bin' contains all the bytes from the beginning of the
-    payload, till the end of 'medatada_signature_message'.
-    """
-    payload_info = input_zip.getinfo('payload.bin')
-    (payload_offset, payload_size) = GetZipEntryOffset(input_zip, payload_info)
-
-    # Read the underlying raw zipfile at specified offset
-    payload_fp = input_zip.fp
-    payload_fp.seek(payload_offset)
-    header_bin = payload_fp.read(24)
-
-    # network byte order (big-endian)
-    header = struct.unpack("!IQQL", header_bin)
-
-    # 'CrAU'
-    magic = header[0]
-    assert magic == 0x43724155, "Invalid magic: {:x}, computed offset {}" \
-        .format(magic, payload_offset)
-
-    manifest_size = header[2]
-    metadata_signature_size = header[3]
-    metadata_total = 24 + manifest_size + metadata_signature_size
-    assert metadata_total < payload_size
-
-    return (payload_offset, metadata_total)
-
-
 def ModifyVABCCompressionParam(content, algo):
   """ Update update VABC Compression Param in dynamic_partitions_info.txt
   Args:
@@ -791,7 +487,7 @@
       else:
         common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
 
-  common.ZipClose(target_zip)
+  target_zip.close()
 
   return target_file
 
@@ -928,7 +624,7 @@
 
     # TODO(xunchang) handle META/postinstall_config.txt'
 
-  common.ZipClose(partial_target_zip)
+  partial_target_zip.close()
 
   return partial_target_file
 
@@ -1013,7 +709,7 @@
   # Write new ab_partitions.txt file
   common.ZipWrite(target_zip, new_ab_partitions, arcname=AB_PARTITIONS)
 
-  common.ZipClose(target_zip)
+  target_zip.close()
 
   return target_file
 
@@ -1068,11 +764,12 @@
         pre_partition_state, post_partition_state):
   assert pre_partition_state is not None
   partition_timestamps = {}
-  for part in pre_partition_state:
-    partition_timestamps[part.partition_name] = part.version
   for part in post_partition_state:
-    partition_timestamps[part.partition_name] = \
-        max(part.version, partition_timestamps[part.partition_name])
+    partition_timestamps[part.partition_name] = part.version
+  for part in pre_partition_state:
+    if part.partition_name in partition_timestamps:
+      partition_timestamps[part.partition_name] = \
+          max(part.version, partition_timestamps[part.partition_name])
   return [
       "--partition_timestamps",
       ",".join([key + ":" + val for (key, val)
@@ -1145,6 +842,14 @@
       logger.info("Either source or target does not support VABC, disabling.")
       OPTIONS.disable_vabc = True
 
+    # Virtual AB Compression was introduced in Android S.
+    # Later, we backported VABC to Android R. But verity support was not
+    # backported, so if VABC is used and we are on Android R, disable
+    # verity computation.
+    if not OPTIONS.disable_vabc and source_info.is_android_r:
+      OPTIONS.disable_verity_computation = True
+      OPTIONS.disable_fec_computation = True
+
   else:
     assert "ab_partitions" in OPTIONS.info_dict, \
         "META/ab_partitions.txt is required for ab_update."
@@ -1192,7 +897,7 @@
   # Metadata to comply with Android OTA package format.
   metadata = GetPackageMetadata(target_info, source_info)
   # Generate payload.
-  payload = Payload()
+  payload = PayloadGenerator(OPTIONS.include_secondary, OPTIONS.wipe_user_data)
 
   partition_timestamps_flags = []
   # Enforce a max timestamp this payload can be applied on top of.
@@ -1208,8 +913,17 @@
         metadata.postcondition.partition_state)
 
   if not ota_utils.IsZucchiniCompatible(source_file, target_file):
+    logger.warning(
+        "Builds don't support zucchini, or the source/target builds have "
+        "incompatible zucchini versions. Disabling zucchini.")
     OPTIONS.enable_zucchini = False
 
+  security_patch_level = target_info.GetBuildProp(
+      "ro.build.version.security_patch")
+  if OPTIONS.security_patch_level is not None:
+    security_patch_level = OPTIONS.security_patch_level
+
+  additional_args += ["--security_patch_level", security_patch_level]
+
   additional_args += ["--enable_zucchini",
                       str(OPTIONS.enable_zucchini).lower()]
 
@@ -1255,7 +969,10 @@
   )
 
   # Sign the payload.
-  payload_signer = PayloadSigner()
+  pw = OPTIONS.key_passwords[OPTIONS.package_key]
+  payload_signer = PayloadSigner(
+      OPTIONS.package_key, OPTIONS.private_key_suffix,
+      pw, OPTIONS.payload_signer)
   payload.Sign(payload_signer)
 
   # Write the payload into output zip.
@@ -1268,7 +985,7 @@
     # building an incremental OTA. See the comments for "--include_secondary".
     secondary_target_file = GetTargetFilesZipForSecondaryImages(
         target_file, OPTIONS.skip_postinstall)
-    secondary_payload = Payload(secondary=True)
+    secondary_payload = PayloadGenerator(secondary=True)
     secondary_payload.Generate(secondary_target_file,
                                additional_args=["--max_timestamp",
                                                 max_timestamp])
@@ -1278,8 +995,7 @@
   # If dm-verity is supported for the device, copy contents of care_map
   # into A/B OTA package.
   target_zip = zipfile.ZipFile(target_file, "r", allowZip64=True)
-  if (target_info.get("verity") == "true" or
-          target_info.get("avb_enable") == "true"):
+  if target_info.get("avb_enable") == "true":
     care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
                      "META/" + x in target_zip.namelist()]
 
@@ -1301,21 +1017,14 @@
     common.ZipWriteStr(output_zip, "apex_info.pb", ota_apex_info,
                        compress_type=zipfile.ZIP_STORED)
 
-  common.ZipClose(target_zip)
+  target_zip.close()
 
   # We haven't written the metadata entry yet, which will be handled in
   # FinalizeMetadata().
-  common.ZipClose(output_zip)
+  output_zip.close()
 
-  # AbOtaPropertyFiles intends to replace StreamingPropertyFiles, as it covers
-  # all the info of the latter. However, system updaters and OTA servers need to
-  # take time to switch to the new flag. We keep both of the flags for
-  # P-timeframe, and will remove StreamingPropertyFiles in later release.
-  needed_property_files = (
-      AbOtaPropertyFiles(),
-      StreamingPropertyFiles(),
-  )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
+  FinalizeMetadata(metadata, staging_file, output_file,
+                   package_key=OPTIONS.package_key)
 
 
 def main(argv):
@@ -1427,6 +1136,8 @@
       OPTIONS.enable_lz4diff = a.lower() != "false"
     elif o == "--vabc_compression_param":
       OPTIONS.vabc_compression_param = a.lower()
+    elif o == "--security_patch_level":
+      OPTIONS.security_patch_level = a
     else:
       return False
     return True
@@ -1477,6 +1188,7 @@
                                  "enable_zucchini=",
                                  "enable_lz4diff=",
                                  "vabc_compression_param=",
+                                 "security_patch_level=",
                              ], extra_option_handler=option_handler)
 
   if len(args) != 2:
diff --git a/tools/releasetools/ota_metadata_pb2.py b/tools/releasetools/ota_metadata_pb2.py
index 2552464..012d9ab 100644
--- a/tools/releasetools/ota_metadata_pb2.py
+++ b/tools/releasetools/ota_metadata_pb2.py
@@ -19,8 +19,8 @@
   name='ota_metadata.proto',
   package='build.tools.releasetools',
   syntax='proto3',
-  serialized_options=_b('H\003'),
-  serialized_pb=_b('\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"c\n\x08\x41pexInfo\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\x15\n\ris_compressed\x18\x03 \x01(\x08\x12\x19\n\x11\x64\x65\x63ompressed_size\x18\x04 \x01(\x03\"E\n\x0c\x41pexMetadata\x12\x35\n\tapex_info\x18\x01 \x03(\x0b\x32\".build.tools.releasetools.ApexInfo\"\xf8\x03\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x12\x15\n\rspl_downgrade\x18\t \x01(\x08\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42\x02H\x03\x62\x06proto3')
+  serialized_options=_b('\n\013android.otaB\022OtaPackageMetadataH\003'),
+  serialized_pb=_b('\n\x12ota_metadata.proto\x12\x18\x62uild.tools.releasetools\"X\n\x0ePartitionState\x12\x16\n\x0epartition_name\x18\x01 \x01(\t\x12\x0e\n\x06\x64\x65vice\x18\x02 \x03(\t\x12\r\n\x05\x62uild\x18\x03 \x03(\t\x12\x0f\n\x07version\x18\x04 \x01(\t\"\xce\x01\n\x0b\x44\x65viceState\x12\x0e\n\x06\x64\x65vice\x18\x01 \x03(\t\x12\r\n\x05\x62uild\x18\x02 \x03(\t\x12\x19\n\x11\x62uild_incremental\x18\x03 \x01(\t\x12\x11\n\ttimestamp\x18\x04 \x01(\x03\x12\x11\n\tsdk_level\x18\x05 \x01(\t\x12\x1c\n\x14security_patch_level\x18\x06 \x01(\t\x12\x41\n\x0fpartition_state\x18\x07 \x03(\x0b\x32(.build.tools.releasetools.PartitionState\"{\n\x08\x41pexInfo\x12\x14\n\x0cpackage_name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\x03\x12\x15\n\ris_compressed\x18\x03 \x01(\x08\x12\x19\n\x11\x64\x65\x63ompressed_size\x18\x04 \x01(\x03\x12\x16\n\x0esource_version\x18\x05 \x01(\x03\"E\n\x0c\x41pexMetadata\x12\x35\n\tapex_info\x18\x01 \x03(\x0b\x32\".build.tools.releasetools.ApexInfo\"\xf8\x03\n\x0bOtaMetadata\x12;\n\x04type\x18\x01 \x01(\x0e\x32-.build.tools.releasetools.OtaMetadata.OtaType\x12\x0c\n\x04wipe\x18\x02 \x01(\x08\x12\x11\n\tdowngrade\x18\x03 \x01(\x08\x12P\n\x0eproperty_files\x18\x04 \x03(\x0b\x32\x38.build.tools.releasetools.OtaMetadata.PropertyFilesEntry\x12;\n\x0cprecondition\x18\x05 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12<\n\rpostcondition\x18\x06 \x01(\x0b\x32%.build.tools.releasetools.DeviceState\x12#\n\x1bretrofit_dynamic_partitions\x18\x07 \x01(\x08\x12\x16\n\x0erequired_cache\x18\x08 \x01(\x03\x12\x15\n\rspl_downgrade\x18\t \x01(\x08\x1a\x34\n\x12PropertyFilesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01\"4\n\x07OtaType\x12\x0b\n\x07UNKNOWN\x10\x00\x12\x06\n\x02\x41\x42\x10\x01\x12\t\n\x05\x42LOCK\x10\x02\x12\t\n\x05\x42RICK\x10\x03\x42#\n\x0b\x61ndroid.otaB\x12OtaPackageMetadataH\x03\x62\x06proto3')
 )
 
 
@@ -50,8 +50,8 @@
   ],
   containing_type=None,
   serialized_options=None,
-  serialized_start=972,
-  serialized_end=1024,
+  serialized_start=996,
+  serialized_end=1048,
 )
 _sym_db.RegisterEnumDescriptor(_OTAMETADATA_OTATYPE)
 
@@ -216,6 +216,13 @@
       message_type=None, enum_type=None, containing_type=None,
       is_extension=False, extension_scope=None,
       serialized_options=None, file=DESCRIPTOR),
+    _descriptor.FieldDescriptor(
+      name='source_version', full_name='build.tools.releasetools.ApexInfo.source_version', index=4,
+      number=5, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      serialized_options=None, file=DESCRIPTOR),
   ],
   extensions=[
   ],
@@ -229,7 +236,7 @@
   oneofs=[
   ],
   serialized_start=347,
-  serialized_end=446,
+  serialized_end=470,
 )
 
 
@@ -259,8 +266,8 @@
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=448,
-  serialized_end=517,
+  serialized_start=472,
+  serialized_end=541,
 )
 
 
@@ -297,8 +304,8 @@
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=918,
-  serialized_end=970,
+  serialized_start=942,
+  serialized_end=994,
 )
 
 _OTAMETADATA = _descriptor.Descriptor(
@@ -384,8 +391,8 @@
   extension_ranges=[],
   oneofs=[
   ],
-  serialized_start=520,
-  serialized_end=1024,
+  serialized_start=544,
+  serialized_end=1048,
 )
 
 _DEVICESTATE.fields_by_name['partition_state'].message_type = _PARTITIONSTATE
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 5d403dc..e36a2be 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -16,13 +16,18 @@
 import itertools
 import logging
 import os
+import shutil
 import struct
 import zipfile
 
 import ota_metadata_pb2
-from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
+import common
+from common import (ZipDelete, OPTIONS, MakeTempFile,
                     ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
-                    SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps)
+                    SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
+                    GetRamdiskFormat)
+from payload_signer import PayloadSigner
+
 
 logger = logging.getLogger(__name__)
 
@@ -43,7 +48,7 @@
 SECURITY_PATCH_LEVEL_PROP_NAME = "ro.build.version.security_patch"
 
 
-def FinalizeMetadata(metadata, input_file, output_file, needed_property_files):
+def FinalizeMetadata(metadata, input_file, output_file, needed_property_files=None, package_key=None, pw=None):
   """Finalizes the metadata and signs an A/B OTA package.
 
   In order to stream an A/B OTA package, we need 'ota-streaming-property-files'
@@ -61,32 +66,42 @@
     input_file: The input ZIP filename that doesn't contain the package METADATA
         entry yet.
     output_file: The final output ZIP filename.
-    needed_property_files: The list of PropertyFiles' to be generated.
+    needed_property_files: The list of PropertyFiles to be generated. Defaults
+        to [AbOtaPropertyFiles(), StreamingPropertyFiles()].
+    package_key: The key used to sign this OTA package.
+    pw: Password for the package_key.
   """
+  no_signing = package_key is None
+
+  if needed_property_files is None:
+    # AbOtaPropertyFiles intends to replace StreamingPropertyFiles, as it covers
+    # all the info of the latter. However, system updaters and OTA servers need to
+    # take time to switch to the new flag. We keep both of the flags for
+    # P-timeframe, and will remove StreamingPropertyFiles in later release.
+    needed_property_files = (
+        AbOtaPropertyFiles(),
+        StreamingPropertyFiles(),
+    )
 
   def ComputeAllPropertyFiles(input_file, needed_property_files):
     # Write the current metadata entry with placeholders.
-    with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
       for property_files in needed_property_files:
         metadata.property_files[property_files.name] = property_files.Compute(
             input_zip)
-      namelist = input_zip.namelist()
 
-    if METADATA_NAME in namelist or METADATA_PROTO_NAME in namelist:
-      ZipDelete(input_file, [METADATA_NAME, METADATA_PROTO_NAME])
-    output_zip = zipfile.ZipFile(input_file, 'a', allowZip64=True)
-    WriteMetadata(metadata, output_zip)
-    ZipClose(output_zip)
+    ZipDelete(input_file, [METADATA_NAME, METADATA_PROTO_NAME], True)
+    with zipfile.ZipFile(input_file, 'a', allowZip64=True) as output_zip:
+      WriteMetadata(metadata, output_zip)
 
-    if OPTIONS.no_signing:
+    if no_signing:
       return input_file
 
     prelim_signing = MakeTempFile(suffix='.zip')
-    SignOutput(input_file, prelim_signing)
+    SignOutput(input_file, prelim_signing, package_key, pw)
     return prelim_signing
 
   def FinalizeAllPropertyFiles(prelim_signing, needed_property_files):
-    with zipfile.ZipFile(prelim_signing, allowZip64=True) as prelim_signing_zip:
+    with zipfile.ZipFile(prelim_signing, 'r', allowZip64=True) as prelim_signing_zip:
       for property_files in needed_property_files:
         metadata.property_files[property_files.name] = property_files.Finalize(
             prelim_signing_zip,
@@ -112,15 +127,14 @@
 
   # Replace the METADATA entry.
   ZipDelete(prelim_signing, [METADATA_NAME, METADATA_PROTO_NAME])
-  output_zip = zipfile.ZipFile(prelim_signing, 'a', allowZip64=True)
-  WriteMetadata(metadata, output_zip)
-  ZipClose(output_zip)
+  with zipfile.ZipFile(prelim_signing, 'a', allowZip64=True) as output_zip:
+    WriteMetadata(metadata, output_zip)
 
   # Re-sign the package after updating the metadata entry.
-  if OPTIONS.no_signing:
-    output_file = prelim_signing
+  if no_signing:
+    shutil.copy(prelim_signing, output_file)
   else:
-    SignOutput(prelim_signing, output_file)
+    SignOutput(prelim_signing, output_file, package_key, pw)
 
   # Reopen the final signed zip to double check the streaming metadata.
   with zipfile.ZipFile(output_file, allowZip64=True) as output_zip:
@@ -371,15 +385,18 @@
     for partition in PARTITIONS_WITH_BUILD_PROP:
       partition_prop_key = "{}.build.prop".format(partition)
       input_file = info_dict[partition_prop_key].input_file
+      ramdisk = GetRamdiskFormat(info_dict)
       if isinstance(input_file, zipfile.ZipFile):
         with zipfile.ZipFile(input_file.filename, allowZip64=True) as input_zip:
           info_dict[partition_prop_key] = \
               PartitionBuildProps.FromInputFile(input_zip, partition,
-                                                placeholder_values)
+                                                placeholder_values,
+                                                ramdisk)
       else:
         info_dict[partition_prop_key] = \
             PartitionBuildProps.FromInputFile(input_file, partition,
-                                              placeholder_values)
+                                              placeholder_values,
+                                              ramdisk)
     info_dict["build.prop"] = info_dict["system.build.prop"]
     build_info_set.add(BuildInfo(info_dict, default_build_info.oem_dicts))
 
@@ -570,7 +587,7 @@
     else:
       tokens.append(ComputeEntryOffsetSize(METADATA_NAME))
       if METADATA_PROTO_NAME in zip_file.namelist():
-          tokens.append(ComputeEntryOffsetSize(METADATA_PROTO_NAME))
+        tokens.append(ComputeEntryOffsetSize(METADATA_PROTO_NAME))
 
     return ','.join(tokens)
 
@@ -592,10 +609,13 @@
     return []
 
 
-def SignOutput(temp_zip_name, output_zip_name):
-  pw = OPTIONS.key_passwords[OPTIONS.package_key]
+def SignOutput(temp_zip_name, output_zip_name, package_key=None, pw=None):
+  if package_key is None:
+    package_key = OPTIONS.package_key
+  if pw is None and OPTIONS.key_passwords:
+    pw = OPTIONS.key_passwords[package_key]
 
-  SignFile(temp_zip_name, output_zip_name, OPTIONS.package_key, pw,
+  SignFile(temp_zip_name, output_zip_name, package_key, pw,
            whole_file=True)
 
 
@@ -689,10 +709,255 @@
         if entry in zfp.namelist():
           return zfp.read(entry).decode()
     else:
-      entry_path = os.path.join(entry, path)
+      entry_path = os.path.join(path, entry)
       if os.path.exists(entry_path):
         with open(entry_path, "r") as fp:
           return fp.read()
-      else:
-        return ""
-  return ReadEntry(source_file, _ZUCCHINI_CONFIG_ENTRY_NAME) == ReadEntry(target_file, _ZUCCHINI_CONFIG_ENTRY_NAME)
+    return False
+  source_entry = ReadEntry(source_file, _ZUCCHINI_CONFIG_ENTRY_NAME)
+  target_entry = ReadEntry(target_file, _ZUCCHINI_CONFIG_ENTRY_NAME)
+  return source_entry and target_entry and source_entry == target_entry
+
+
+class PayloadGenerator(object):
+  """Manages the creation and the signing of an A/B OTA Payload."""
+
+  PAYLOAD_BIN = 'payload.bin'
+  PAYLOAD_PROPERTIES_TXT = 'payload_properties.txt'
+  SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
+  SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
+
+  def __init__(self, secondary=False, wipe_user_data=False):
+    """Initializes a Payload instance.
+
+    Args:
+      secondary: Whether it's generating a secondary payload (default: False).
+    """
+    self.payload_file = None
+    self.payload_properties = None
+    self.secondary = secondary
+    self.wipe_user_data = wipe_user_data
+
+  def _Run(self, cmd):  # pylint: disable=no-self-use
+    # Don't pipe (buffer) the output if verbose is set. Let
+    # brillo_update_payload write to stdout/stderr directly, so its progress can
+    # be monitored.
+    if OPTIONS.verbose:
+      common.RunAndCheckOutput(cmd, stdout=None, stderr=None)
+    else:
+      common.RunAndCheckOutput(cmd)
+
+  def Generate(self, target_file, source_file=None, additional_args=None):
+    """Generates a payload from the given target-files zip(s).
+
+    Args:
+      target_file: The filename of the target build target-files zip.
+      source_file: The filename of the source build target-files zip; or None if
+          generating a full OTA.
+      additional_args: A list of additional args that should be passed to
+          brillo_update_payload script; or None.
+    """
+    if additional_args is None:
+      additional_args = []
+
+    payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
+    cmd = ["brillo_update_payload", "generate",
+           "--payload", payload_file,
+           "--target_image", target_file]
+    if source_file is not None:
+      cmd.extend(["--source_image", source_file])
+      if OPTIONS.disable_fec_computation:
+        cmd.extend(["--disable_fec_computation", "true"])
+      if OPTIONS.disable_verity_computation:
+        cmd.extend(["--disable_verity_computation", "true"])
+    cmd.extend(additional_args)
+    self._Run(cmd)
+
+    self.payload_file = payload_file
+    self.payload_properties = None
+
+  def Sign(self, payload_signer):
+    """Generates and signs the hashes of the payload and metadata.
+
+    Args:
+      payload_signer: A PayloadSigner() instance that serves the signing work.
+
+    Raises:
+      AssertionError: On any failure when calling brillo_update_payload script.
+    """
+    assert isinstance(payload_signer, PayloadSigner)
+
+    # 1. Generate hashes of the payload and metadata files.
+    payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    cmd = ["brillo_update_payload", "hash",
+           "--unsigned_payload", self.payload_file,
+           "--signature_size", str(payload_signer.maximum_signature_size),
+           "--metadata_hash_file", metadata_sig_file,
+           "--payload_hash_file", payload_sig_file]
+    self._Run(cmd)
+
+    # 2. Sign the hashes.
+    signed_payload_sig_file = payload_signer.SignHashFile(payload_sig_file)
+    signed_metadata_sig_file = payload_signer.SignHashFile(metadata_sig_file)
+
+    # 3. Insert the signatures back into the payload file.
+    signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
+                                              suffix=".bin")
+    cmd = ["brillo_update_payload", "sign",
+           "--unsigned_payload", self.payload_file,
+           "--payload", signed_payload_file,
+           "--signature_size", str(payload_signer.maximum_signature_size),
+           "--metadata_signature_file", signed_metadata_sig_file,
+           "--payload_signature_file", signed_payload_sig_file]
+    self._Run(cmd)
+
+    self.payload_file = signed_payload_file
+
+  def WriteToZip(self, output_zip):
+    """Writes the payload to the given zip.
+
+    Args:
+      output_zip: The output ZipFile instance.
+    """
+    assert self.payload_file is not None
+    # 4. Dump the signed payload properties.
+    properties_file = common.MakeTempFile(prefix="payload-properties-",
+                                          suffix=".txt")
+    cmd = ["brillo_update_payload", "properties",
+           "--payload", self.payload_file,
+           "--properties_file", properties_file]
+    self._Run(cmd)
+
+    if self.secondary:
+      with open(properties_file, "a") as f:
+        f.write("SWITCH_SLOT_ON_REBOOT=0\n")
+
+    if self.wipe_user_data:
+      with open(properties_file, "a") as f:
+        f.write("POWERWASH=1\n")
+
+    self.payload_properties = properties_file
+
+    if self.secondary:
+      payload_arcname = PayloadGenerator.SECONDARY_PAYLOAD_BIN
+      payload_properties_arcname = PayloadGenerator.SECONDARY_PAYLOAD_PROPERTIES_TXT
+    else:
+      payload_arcname = PayloadGenerator.PAYLOAD_BIN
+      payload_properties_arcname = PayloadGenerator.PAYLOAD_PROPERTIES_TXT
+
+    # Add the signed payload file and properties into the zip. In order to
+    # support streaming, we pack them as ZIP_STORED. So these entries can be
+    # read directly with the offset and length pairs.
+    common.ZipWrite(output_zip, self.payload_file, arcname=payload_arcname,
+                    compress_type=zipfile.ZIP_STORED)
+    common.ZipWrite(output_zip, self.payload_properties,
+                    arcname=payload_properties_arcname,
+                    compress_type=zipfile.ZIP_STORED)
+
+
+class StreamingPropertyFiles(PropertyFiles):
+  """A subclass for computing the property-files for streaming A/B OTAs."""
+
+  def __init__(self):
+    super(StreamingPropertyFiles, self).__init__()
+    self.name = 'ota-streaming-property-files'
+    self.required = (
+        # payload.bin and payload_properties.txt must exist.
+        'payload.bin',
+        'payload_properties.txt',
+    )
+    self.optional = (
+        # apex_info.pb isn't directly used in the update flow.
+        'apex_info.pb',
+        # care_map is available only if dm-verity is enabled.
+        'care_map.pb',
+        'care_map.txt',
+        # compatibility.zip is available only if target supports Treble.
+        'compatibility.zip',
+    )
+
+
+class AbOtaPropertyFiles(StreamingPropertyFiles):
+  """The property-files for A/B OTA that includes payload_metadata.bin info.
+
+  Since P, we expose one more token (aka property-file), in addition to the ones
+  for streaming A/B OTA, for a virtual entry of 'payload_metadata.bin'.
+  'payload_metadata.bin' is the header part of a payload ('payload.bin'), which
+  doesn't exist as a separate ZIP entry, but can be used to verify if the
+  payload can be applied on the given device.
+
+  For backward compatibility, we keep both 'ota-streaming-property-files' and
+  the newly added 'ota-property-files' in P. The new token will only be
+  available in 'ota-property-files'.
+  """
+
+  def __init__(self):
+    super(AbOtaPropertyFiles, self).__init__()
+    self.name = 'ota-property-files'
+
+  def _GetPrecomputed(self, input_zip):
+    offset, size = self._GetPayloadMetadataOffsetAndSize(input_zip)
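+    # Each property-files token has the form <name>:<offset>:<size>.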
+    return ['payload_metadata.bin:{}:{}'.format(offset, size)]
+
+  @staticmethod
+  def _GetPayloadMetadataOffsetAndSize(input_zip):
+    """Computes the offset and size of the payload metadata for a given package.
+
+    (From system/update_engine/update_metadata.proto)
+    A delta update file contains all the deltas needed to update a system from
+    one specific version to another specific version. The update format is
+    represented by this struct pseudocode:
+
+    struct delta_update_file {
+      char magic[4] = "CrAU";
+      uint64 file_format_version;
+      uint64 manifest_size;  // Size of protobuf DeltaArchiveManifest
+
+      // Only present if format_version > 1:
+      uint32 metadata_signature_size;
+
+      // The Bzip2 compressed DeltaArchiveManifest
+      char manifest[manifest_size];
+
+      // The signature of the metadata (from the beginning of the payload up to
+      // this location, not including the signature itself). This is a
+      // serialized Signatures message.
+      char metadata_signature_message[metadata_signature_size];
+
+      // Data blobs for files, no specific format. The specific offset
+      // and length of each data blob is recorded in the DeltaArchiveManifest.
+      struct {
+        char data[];
+      } blobs[];
+
+      // These two are not signed:
+      uint64 payload_signatures_message_size;
+      char payload_signatures_message[];
+    };
+
+    'payload_metadata.bin' contains all the bytes from the beginning of the
+    payload up to the end of 'metadata_signature_message'.
+    """
+    payload_info = input_zip.getinfo('payload.bin')
+    (payload_offset, payload_size) = GetZipEntryOffset(input_zip, payload_info)
+
+    # Read the payload header from the underlying raw file at the entry's offset.
+    payload_fp = input_zip.fp
+    payload_fp.seek(payload_offset)
+    header_bin = payload_fp.read(24)
+
+    # network byte order (big-endian)
+    header = struct.unpack("!IQQL", header_bin)
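+    # -> (magic, file_format_version, manifest_size, metadata_signature_size)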
+
+    # 'CrAU'
+    magic = header[0]
+    assert magic == 0x43724155, "Invalid magic: {:x}, computed offset {}" \
+        .format(magic, payload_offset)
+
+    manifest_size = header[2]
+    metadata_signature_size = header[3]
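+    # Fixed header size: 4 (magic) + 8 (version) + 8 (manifest_size) +
+    # 4 (metadata_signature_size) = 24 bytes.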
+    metadata_total = 24 + manifest_size + metadata_signature_size
+    assert metadata_total <= payload_size
+
+    return (payload_offset, metadata_total)
diff --git a/tools/releasetools/payload_signer.py b/tools/releasetools/payload_signer.py
new file mode 100644
index 0000000..4f342ac
--- /dev/null
+++ b/tools/releasetools/payload_signer.py
@@ -0,0 +1,122 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import common
+import logging
+from common import OPTIONS
+
+logger = logging.getLogger(__name__)
+
+
+class PayloadSigner(object):
+  """A class that wraps the payload signing works.
+
+  When generating a Payload, hashes of the payload and metadata files will be
+  signed with the device key, either by calling an external payload signer or
+  by calling openssl with the package key. This class provides a unified
+  interface, so that callers can just call PayloadSigner.Sign().
+
+  If an external payload signer has been specified (OPTIONS.payload_signer), it
+  calls the signer with the provided args (OPTIONS.payload_signer_args). Note
+  that the signing key should be provided as part of the payload_signer_args.
+  Otherwise, it uses the package key (OPTIONS.package_key) and calls openssl
+  to do the signing.
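+
+  A minimal usage sketch (assuming OPTIONS already carries the signing
+  configuration):
+
+    signer = PayloadSigner()
+    signed_payload_path = signer.SignPayload(unsigned_payload_path)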
+  """
+
+  def __init__(self, package_key=None, private_key_suffix=None, pw=None, payload_signer=None):
+    if package_key is None:
+      package_key = OPTIONS.package_key
+    if private_key_suffix is None:
+      private_key_suffix = OPTIONS.private_key_suffix
+
+    if payload_signer is None:
+      # Prepare the payload signing key.
+      private_key = package_key + private_key_suffix
+
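+      # Convert the DER-encoded PKCS#8 key to a cleartext key that the
+      # `openssl pkeyutl` invocation below can consume.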
+      cmd = ["openssl", "pkcs8", "-in", private_key, "-inform", "DER"]
+      cmd.extend(["-passin", "pass:" + pw] if pw else ["-nocrypt"])
+      signing_key = common.MakeTempFile(prefix="key-", suffix=".key")
+      cmd.extend(["-out", signing_key])
+      common.RunAndCheckOutput(cmd, verbose=True)
+
+      self.signer = "openssl"
+      self.signer_args = ["pkeyutl", "-sign", "-inkey", signing_key,
+                          "-pkeyopt", "digest:sha256"]
+      self.maximum_signature_size = self._GetMaximumSignatureSizeInBytes(
+          signing_key)
+    else:
+      self.signer = payload_signer
+      self.signer_args = OPTIONS.payload_signer_args
+      if OPTIONS.payload_signer_maximum_signature_size:
+        self.maximum_signature_size = int(
+            OPTIONS.payload_signer_maximum_signature_size)
+      else:
+        # The legacy config uses RSA2048 keys.
+        logger.warning("The maximum signature size for the payload signer is"
+                       " not set; defaulting to 256 bytes.")
+        self.maximum_signature_size = 256
+
+  @staticmethod
+  def _GetMaximumSignatureSizeInBytes(signing_key):
+    out_signature_size_file = common.MakeTempFile("signature_size")
+    cmd = ["delta_generator", "--out_maximum_signature_size_file={}".format(
+        out_signature_size_file), "--private_key={}".format(signing_key)]
+    common.RunAndCheckOutput(cmd, verbose=True)
+    with open(out_signature_size_file) as f:
+      signature_size = f.read().rstrip()
+    logger.info("%s outputs the maximum signature size: %s", cmd[0],
+                signature_size)
+    return int(signature_size)
+
+  @staticmethod
+  def _Run(cmd):
+    common.RunAndCheckOutput(cmd, stdout=None, stderr=None)
+
+  def SignPayload(self, unsigned_payload):
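+    """Hashes, signs, and re-packs the given unsigned payload.
+
+    Returns the path to a temp file holding the signed payload.
+    """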
+
+    # 1. Generate hashes of the payload and metadata files.
+    payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+    cmd = ["brillo_update_payload", "hash",
+           "--unsigned_payload", unsigned_payload,
+           "--signature_size", str(self.maximum_signature_size),
+           "--metadata_hash_file", metadata_sig_file,
+           "--payload_hash_file", payload_sig_file]
+    self._Run(cmd)
+
+    # 2. Sign the hashes.
+    signed_payload_sig_file = self.SignHashFile(payload_sig_file)
+    signed_metadata_sig_file = self.SignHashFile(metadata_sig_file)
+
+    # 3. Insert the signatures back into the payload file.
+    signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
+                                              suffix=".bin")
+    cmd = ["brillo_update_payload", "sign",
+           "--unsigned_payload", unsigned_payload,
+           "--payload", signed_payload_file,
+           "--signature_size", str(self.maximum_signature_size),
+           "--metadata_signature_file", signed_metadata_sig_file,
+           "--payload_signature_file", signed_payload_sig_file]
+    self._Run(cmd)
+    return signed_payload_file
+
+  def SignHashFile(self, in_file):
+    """Signs the given input file. Returns the output filename."""
+    out_file = common.MakeTempFile(prefix="signed-", suffix=".bin")
+    cmd = [self.signer] + self.signer_args + ['-in', in_file, '-out', out_file]
+    common.RunAndCheckOutput(cmd)
+    return out_file
diff --git a/tools/releasetools/sign_apex.py b/tools/releasetools/sign_apex.py
index a68f1ec..d739982 100755
--- a/tools/releasetools/sign_apex.py
+++ b/tools/releasetools/sign_apex.py
@@ -43,18 +43,13 @@
   --sign_tool <sign_tool>
       Optional flag that specifies a custom signing tool for the contents of the apex.
 
-  --sepolicy_key <key>
-      Optional flag that specifies the sepolicy signing key, defaults to payload_key.
-
-  --sepolicy_cert <cert>
-      Optional flag that specifies the sepolicy signing cert.
-
-  --fsverity_tool <path>
-      Optional flag that specifies the path to fsverity tool to sign SEPolicy, defaults to fsverity.
+  --container_pw <name1=passwd name2=passwd>
+      A whitespace-separated list of key_name=password mappings.
 """
 
 import logging
 import shutil
+import re
 import sys
 
 import apex_utils
@@ -65,8 +60,7 @@
 
 
 def SignApexFile(avbtool, apex_file, payload_key, container_key, no_hashtree,
-                 apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None,
-                 sepolicy_key=None, sepolicy_cert=None, fsverity_tool=None):
+                 apk_keys=None, signing_args=None, codename_to_api_level_map=None, sign_tool=None, container_pw=None):
   """Signs the given apex file."""
   with open(apex_file, 'rb') as input_fp:
     apex_data = input_fp.read()
@@ -76,16 +70,13 @@
       apex_data,
       payload_key=payload_key,
       container_key=container_key,
-      container_pw=None,
+      container_pw=container_pw,
       codename_to_api_level_map=codename_to_api_level_map,
       no_hashtree=no_hashtree,
       apk_keys=apk_keys,
       signing_args=signing_args,
       sign_tool=sign_tool,
-      is_sepolicy=apex_file.endswith(OPTIONS.sepolicy_name),
-      sepolicy_key=sepolicy_key,
-      sepolicy_cert=sepolicy_cert,
-      fsverity_tool=fsverity_tool)
+      is_sepolicy=apex_file.endswith(OPTIONS.sepolicy_name))
 
 
 def main(argv):
@@ -121,12 +112,15 @@
         options['extra_apks'].update({n: key})
     elif o == '--sign_tool':
       options['sign_tool'] = a
-    elif o == '--sepolicy_key':
-      options['sepolicy_key'] = a
-    elif o == '--sepolicy_cert':
-      options['sepolicy_cert'] = a
-    elif o == '--fsverity_tool':
-      options['fsverity_tool'] = a
+    elif o == '--container_pw':
+      passwords = {}
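+      # e.g. "name1=pass1 name2=pass2" -> {"name1": "pass1", "name2": "pass2"}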
+      pairs = a.split()
+      for pair in pairs:
+        if "=" not in pair:
+          continue
+        tokens = pair.split("=", maxsplit=1)
+        passwords[tokens[0].strip()] = tokens[1].strip()
+      options['container_pw'] = passwords
     else:
       return False
     return True
@@ -142,9 +136,7 @@
           'payload_key=',
           'extra_apks=',
           'sign_tool=',
-          'sepolicy_key=',
-          'sepolicy_cert=',
-          'fsverity_tool='
+          'container_pw=',
       ],
       extra_option_handler=option_handler)
 
@@ -166,9 +158,8 @@
       codename_to_api_level_map=options.get(
           'codename_to_api_level_map', {}),
       sign_tool=options.get('sign_tool', None),
-      sepolicy_key=options.get('sepolicy_key', None),
-      sepolicy_cert=options.get('sepolicy_cert', None),
-      fsverity_tool=options.get('fsverity_tool', None))
+      container_pw=options.get('container_pw'),
+  )
   shutil.copyfile(signed_apex, args[1])
   logger.info("done.")
 
diff --git a/tools/releasetools/sign_target_files_apks b/tools/releasetools/sign_target_files_apks
deleted file mode 120000
index b5ec59a..0000000
--- a/tools/releasetools/sign_target_files_apks
+++ /dev/null
@@ -1 +0,0 @@
-sign_target_files_apks.py
\ No newline at end of file
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index c803340..4e7274f 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -27,7 +27,7 @@
       apkcerts.txt file, or the container key for an APEX. Option may be
       repeated to give multiple extra packages.
 
-  --extra_apex_payload_key <name=key>
+  --extra_apex_payload_key <name,name,...=key>
       Add a mapping for APEX package name to payload signing key, which will
       override the default payload signing key in apexkeys.txt. Note that the
       container key should be overridden via the `--extra_apks` flag above.
@@ -99,15 +99,15 @@
       The second dir will be used for lookup if BOARD_USES_RECOVERY_AS_BOOT is
       set to true.
 
-  --avb_{boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
+  --avb_{boot,recovery,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
          vbmeta_vendor}_algorithm <algorithm>
-  --avb_{boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
+  --avb_{boot,recovery,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
          vbmeta_vendor}_key <key>
       Use the specified algorithm (e.g. SHA256_RSA4096) and the key to AVB-sign
       the specified image. Otherwise it uses the existing values in info dict.
 
-  --avb_{apex,boot,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
-         vbmeta_vendor}_extra_args <args>
+  --avb_{apex,boot,recovery,system,system_other,vendor,dtbo,vbmeta,
+         vbmeta_system,vbmeta_vendor}_extra_args <args>
       Specify any additional args that are needed to AVB-sign the image
       (e.g. "--signing_helper /path/to/helper"). The args will be appended to
       the existing ones in info dict.
@@ -137,19 +137,16 @@
   --android_jar_path <path>
       Path to the android.jar to repack the apex file.
 
-  --sepolicy_key <key>
-      Optional flag that specifies the sepolicy signing key, defaults to payload_key for the sepolicy.apex.
-
-  --sepolicy_cert <cert>
-      Optional flag that specifies the sepolicy signing cert.
-
-  --fsverity_tool <path>
-      Optional flag that specifies the path to fsverity tool to sign SEPolicy, defaults to fsverity.
-
   --allow_gsi_debug_sepolicy
       Allow the existence of the file 'userdebug_plat_sepolicy.cil' under
       (/system/system_ext|/system_ext)/etc/selinux.
       If not set, error out when the file exists.
+
+  --override_apk_keys <path>
+      Replace all APK keys with this private key.
+
+  --override_apex_keys <path>
+      Replace all APEX keys with this private key.
 """
 
 from __future__ import print_function
@@ -191,9 +188,6 @@
 OPTIONS.key_map = {}
 OPTIONS.rebuild_recovery = False
 OPTIONS.replace_ota_keys = False
-OPTIONS.replace_verity_public_key = False
-OPTIONS.replace_verity_private_key = False
-OPTIONS.replace_verity_keyid = False
 OPTIONS.remove_avb_public_keys = None
 OPTIONS.tag_changes = ("-test-keys", "-dev-keys", "+release-keys")
 OPTIONS.avb_keys = {}
@@ -205,10 +199,9 @@
 OPTIONS.android_jar_path = None
 OPTIONS.vendor_partitions = set()
 OPTIONS.vendor_otatools = None
-OPTIONS.sepolicy_key = None
-OPTIONS.sepolicy_cert = None
-OPTIONS.fsverity_tool = None
 OPTIONS.allow_gsi_debug_sepolicy = False
+OPTIONS.override_apk_keys = None
+OPTIONS.override_apex_keys = None
 
 
 AVB_FOOTER_ARGS_BY_PARTITION = {
@@ -226,6 +219,7 @@
     'pvmfw': 'avb_pvmfw_add_hash_footer_args',
     'vendor': 'avb_vendor_add_hashtree_footer_args',
     'vendor_boot': 'avb_vendor_boot_add_hash_footer_args',
+    'vendor_kernel_boot': 'avb_vendor_kernel_boot_add_hash_footer_args',
     'vendor_dlkm': "avb_vendor_dlkm_add_hashtree_footer_args",
     'vbmeta': 'avb_vbmeta_args',
     'vbmeta_system': 'avb_vbmeta_system_args',
@@ -246,8 +240,6 @@
 def IsApexFile(filename):
   return filename.endswith(".apex") or filename.endswith(".capex")
 
-def IsSepolicyApex(filename):
-  return filename.endswith(OPTIONS.sepolicy_name)
 
 def GetApexFilename(filename):
   name = os.path.basename(filename)
@@ -258,6 +250,10 @@
 
 
 def GetApkCerts(certmap):
+  if OPTIONS.override_apk_keys is not None:
+    for apk in certmap.keys():
+      certmap[apk] = OPTIONS.override_apk_keys
+
   # apply the key remapping to the contents of the file
   for apk, cert in certmap.items():
     certmap[apk] = OPTIONS.key_map.get(cert, cert)
@@ -270,24 +266,6 @@
 
   return certmap
 
-def GetSepolicyKeys(keys_info):
-  """Gets SEPolicy signing keys applying overrides from command line options.
-
-  Args:
-    keys_info: A dict that maps from the SEPolicy APEX filename to a tuple of
-    (sepolicy_key, sepolicy_cert, fsverity_tool).
-
-  Returns:
-    A dict that contains the updated APEX key mapping, which should be used for
-    the current signing.
-  """
-  for name in keys_info:
-      (sepolicy_key, sepolicy_cert, fsverity_tool) = keys_info[name]
-      sepolicy_key = OPTIONS.sepolicy_key if OPTIONS.sepolicy_key else sepolicy_key
-      sepolicy_cert = OPTIONS.sepolicy_cert if OPTIONS.sepolicy_cert else sepolicy_cert
-      fsverity_tool = OPTIONS.fsverity_tool if OPTIONS.fsverity_tool else fsverity_tool
-      keys_info[name] = (sepolicy_key, sepolicy_cert, fsverity_tool)
-  return keys_info
 
 def GetApexKeys(keys_info, key_map):
   """Gets APEX payload and container signing keys by applying the mapping rules.
@@ -306,6 +284,15 @@
   Raises:
     AssertionError: On invalid container / payload key overrides.
   """
+  if OPTIONS.override_apex_keys is not None:
+    for apex in keys_info.keys():
+      keys_info[apex] = (OPTIONS.override_apex_keys, keys_info[apex][1], keys_info[apex][2])
+
+  if OPTIONS.override_apk_keys is not None:
+    key = key_map.get(OPTIONS.override_apk_keys, OPTIONS.override_apk_keys)
+    for apex in keys_info.keys():
+      keys_info[apex] = (keys_info[apex][0], key, keys_info[apex][2])
+
   # Apply all the --extra_apex_payload_key options to override the payload
   # signing keys in the given keys_info.
   for apex, key in OPTIONS.extra_apex_payload_keys.items():
@@ -550,7 +537,7 @@
 def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
                        apk_keys, apex_keys, key_passwords,
                        platform_api_level, codename_to_api_level_map,
-                       compressed_extension, sepolicy_keys):
+                       compressed_extension):
   # maxsize measures the maximum filename length, including the ones to be
   # skipped.
   try:
@@ -618,17 +605,6 @@
         print("           : %-*s payload   (%s)" % (
             maxsize, name, payload_key))
 
-        sepolicy_key = None
-        sepolicy_cert = None
-        fsverity_tool = None
-
-        if IsSepolicyApex(name):
-          (sepolicy_key, sepolicy_cert, fsverity_tool) = sepolicy_keys[name]
-          print("           : %-*s sepolicy key   (%s)" % (
-            maxsize, name, sepolicy_key))
-          print("           : %-*s sepolicy cert  (%s)" % (
-            maxsize, name, sepolicy_cert))
-
         signed_apex = apex_utils.SignApex(
             misc_info['avb_avbtool'],
             data,
@@ -639,11 +615,7 @@
             codename_to_api_level_map,
             no_hashtree=None,  # Let apex_util determine if hash tree is needed
             signing_args=OPTIONS.avb_extra_args.get('apex'),
-            sign_tool=sign_tool,
-            is_sepolicy=IsSepolicyApex(name),
-            sepolicy_key=sepolicy_key,
-            sepolicy_cert=sepolicy_cert,
-            fsverity_tool=fsverity_tool)
+            sign_tool=sign_tool)
         common.ZipWrite(output_tf_zip, signed_apex, filename)
 
       else:
@@ -688,11 +660,6 @@
     elif filename == "META/misc_info.txt":
       pass
 
-    # Skip verity public key if we will replace it.
-    elif (OPTIONS.replace_verity_public_key and
-          filename in ("BOOT/RAMDISK/verity_key",
-                       "ROOT/verity_key")):
-      pass
     elif (OPTIONS.remove_avb_public_keys and
           (filename.startswith("BOOT/RAMDISK/avb/") or
            filename.startswith("BOOT/RAMDISK/first_stage_ramdisk/avb/"))):
@@ -706,10 +673,6 @@
         # Copy it verbatim if we don't want to remove it.
         common.ZipWriteStr(output_tf_zip, out_info, data)
 
-    # Skip verity keyid (for system_root_image use) if we will replace it.
-    elif OPTIONS.replace_verity_keyid and filename == "BOOT/cmdline":
-      pass
-
     # Skip the vbmeta digest as we will recalculate it.
     elif filename == "META/vbmeta_digest.txt":
       pass
@@ -791,27 +754,6 @@
   if OPTIONS.replace_ota_keys:
     ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
 
-  # Replace the keyid string in misc_info dict.
-  if OPTIONS.replace_verity_private_key:
-    ReplaceVerityPrivateKey(misc_info, OPTIONS.replace_verity_private_key[1])
-
-  if OPTIONS.replace_verity_public_key:
-    # Replace the one in root dir in system.img.
-    ReplaceVerityPublicKey(
-        output_tf_zip, 'ROOT/verity_key', OPTIONS.replace_verity_public_key[1])
-
-    if not system_root_image:
-      # Additionally replace the copy in ramdisk if not using system-as-root.
-      ReplaceVerityPublicKey(
-          output_tf_zip,
-          'BOOT/RAMDISK/verity_key',
-          OPTIONS.replace_verity_public_key[1])
-
-  # Replace the keyid string in BOOT/cmdline.
-  if OPTIONS.replace_verity_keyid:
-    ReplaceVerityKeyId(input_tf_zip, output_tf_zip,
-                       OPTIONS.replace_verity_keyid[1])
-
   # Replace the AVB signing keys, if any.
   ReplaceAvbSigningKeys(misc_info)
 
@@ -927,7 +869,7 @@
         pieces[-1] = EditTags(pieces[-1])
         value = "/".join(pieces)
       elif key == "ro.build.description":
-        pieces = value.split(" ")
+        pieces = value.split()
         assert pieces[-1].endswith("-keys")
         pieces[-1] = EditTags(pieces[-1])
         value = " ".join(pieces)
@@ -959,7 +901,7 @@
   certs_zip = zipfile.ZipFile(temp_file, "w", allowZip64=True)
   for k in keys:
     common.ZipWrite(certs_zip, k)
-  common.ZipClose(certs_zip)
+  certs_zip.close()
   common.ZipWriteStr(output_zip, filename, temp_file.getvalue())
 
 
@@ -1028,64 +970,6 @@
     WriteOtacerts(output_tf_zip, info.filename, mapped_keys + extra_keys)
 
 
-def ReplaceVerityPublicKey(output_zip, filename, key_path):
-  """Replaces the verity public key at the given path in the given zip.
-
-  Args:
-    output_zip: The output target_files zip.
-    filename: The archive name in the output zip.
-    key_path: The path to the public key.
-  """
-  print("Replacing verity public key with %s" % (key_path,))
-  common.ZipWrite(output_zip, key_path, arcname=filename)
-
-
-def ReplaceVerityPrivateKey(misc_info, key_path):
-  """Replaces the verity private key in misc_info dict.
-
-  Args:
-    misc_info: The info dict.
-    key_path: The path to the private key in PKCS#8 format.
-  """
-  print("Replacing verity private key with %s" % (key_path,))
-  misc_info["verity_key"] = key_path
-
-
-def ReplaceVerityKeyId(input_zip, output_zip, key_path):
-  """Replaces the veritykeyid parameter in BOOT/cmdline.
-
-  Args:
-    input_zip: The input target_files zip, which should be already open.
-    output_zip: The output target_files zip, which should be already open and
-        writable.
-    key_path: The path to the PEM encoded X.509 certificate.
-  """
-  in_cmdline = input_zip.read("BOOT/cmdline").decode()
-  # Copy in_cmdline to output_zip if veritykeyid is not present.
-  if "veritykeyid" not in in_cmdline:
-    common.ZipWriteStr(output_zip, "BOOT/cmdline", in_cmdline)
-    return
-
-  out_buffer = []
-  for param in in_cmdline.split():
-    if "veritykeyid" not in param:
-      out_buffer.append(param)
-      continue
-
-    # Extract keyid using openssl command.
-    p = common.Run(["openssl", "x509", "-in", key_path, "-text"],
-                   stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    keyid, stderr = p.communicate()
-    assert p.returncode == 0, "Failed to dump certificate: {}".format(stderr)
-    keyid = re.search(
-        r'keyid:([0-9a-fA-F:]*)', keyid).group(1).replace(':', '').lower()
-    print("Replacing verity keyid with {}".format(keyid))
-    out_buffer.append("veritykeyid=id:%s" % (keyid,))
-
-  out_cmdline = ' '.join(out_buffer).strip() + '\n'
-  common.ZipWriteStr(output_zip, "BOOT/cmdline", out_cmdline)
-
-
 def ReplaceMiscInfoTxt(input_zip, output_zip, misc_info):
   """Replaces META/misc_info.txt.
 
@@ -1144,7 +1028,7 @@
 
     tokens = []
     changed = False
-    for token in args.split(' '):
+    for token in args.split():
       fingerprint_key = 'com.android.build.{}.fingerprint'.format(partition)
       if not token.startswith(fingerprint_key):
         tokens.append(token)
@@ -1253,24 +1137,20 @@
 def ReadApexKeysInfo(tf_zip):
   """Parses the APEX keys info from a given target-files zip.
 
-  Given a target-files ZipFile, parses the META/apexkeys.txt entry and returns
-  two dicts, the first one contains the mapping from APEX names
-  (e.g. com.android.tzdata) to a tuple of (payload_key, container_key,
-  sign_tool). The second one maps the sepolicy APEX name to a tuple containing
-  (sepolicy_key, sepolicy_cert, fsverity_tool).
+  Given a target-files ZipFile, parses the META/apexkeys.txt entry and returns a
+  dict that contains the mapping from APEX names (e.g. com.android.tzdata) to a
+  tuple of (payload_key, container_key, sign_tool).
 
   Args:
     tf_zip: The input target_files ZipFile (already open).
 
   Returns:
-    name : (payload_key, container_key, sign_tool)
+    A dict of {name: (payload_key, container_key, sign_tool)}, where:
       - payload_key contains the path to the payload signing key
       - container_key contains the path to the container signing key
       - sign_tool is an apex-specific signing tool for its payload contents
-    name : (sepolicy_key, sepolicy_cert, fsverity_tool)
   """
   keys = {}
-  sepolicy_keys = {}
   for line in tf_zip.read('META/apexkeys.txt').decode().split('\n'):
     line = line.strip()
     if not line:
@@ -1281,9 +1161,6 @@
         r'private_key="(?P<PAYLOAD_PRIVATE_KEY>.*)"\s+'
         r'container_certificate="(?P<CONTAINER_CERT>.*)"\s+'
         r'container_private_key="(?P<CONTAINER_PRIVATE_KEY>.*?)"'
-        r'(\s+sepolicy_key="(?P<SEPOLICY_KEY>.*?)")?'
-        r'(\s+sepolicy_certificate="(?P<SEPOLICY_CERT>.*?)")?'
-        r'(\s+fsverity_tool="(?P<FSVERITY_TOOL>.*?)")?'
         r'(\s+partition="(?P<PARTITION>.*?)")?'
         r'(\s+sign_tool="(?P<SIGN_TOOL>.*?)")?$',
         line)
@@ -1312,18 +1189,12 @@
             container_private_key, OPTIONS.private_key_suffix):
       container_key = container_cert[:-len(OPTIONS.public_key_suffix)]
     else:
-      raise ValueError("Failed to parse container keys: \n{} **** {}".format(container_cert, container_private_key))
+      raise ValueError("Failed to parse container keys: \n{}".format(line))
 
     sign_tool = matches.group("SIGN_TOOL")
     keys[name] = (payload_private_key, container_key, sign_tool)
 
-    if IsSepolicyApex(name):
-      sepolicy_key = matches.group('SEPOLICY_KEY')
-      sepolicy_cert = matches.group('SEPOLICY_CERT')
-      fsverity_tool = matches.group('FSVERITY_TOOL')
-      sepolicy_keys[name] = (sepolicy_key, sepolicy_cert, fsverity_tool)
-
-  return keys, sepolicy_keys
+  return keys
 
 
 def BuildVendorPartitions(output_zip_path):
@@ -1354,14 +1225,16 @@
     vendor_misc_info["no_boot"] = "true"  # boot
     vendor_misc_info["vendor_boot"] = "false"  # vendor_boot
     vendor_misc_info["no_recovery"] = "true"  # recovery
+    vendor_misc_info["avb_enable"] = "false"  # vbmeta
 
   vendor_misc_info["board_bpt_enable"] = "false"  # partition-table
   vendor_misc_info["has_dtbo"] = "false"  # dtbo
   vendor_misc_info["has_pvmfw"] = "false"  # pvmfw
   vendor_misc_info["avb_custom_images_partition_list"] = ""  # custom images
-  vendor_misc_info["avb_enable"] = "false"  # vbmeta
+  vendor_misc_info["avb_building_vbmeta_image"] = "false" # skip building vbmeta
   vendor_misc_info["use_dynamic_partitions"] = "false"  # super_empty
   vendor_misc_info["build_super_partition"] = "false"  # super split
+  vendor_misc_info["avb_vbmeta_system"] = ""  # skip building vbmeta_system
   with open(vendor_misc_info_path, "w") as output:
     for key in sorted(vendor_misc_info):
       output.write("{}={}\n".format(key, vendor_misc_info[key]))
@@ -1413,9 +1286,14 @@
       img_file_path = "IMAGES/{}.img".format(p)
       map_file_path = "IMAGES/{}.map".format(p)
       common.ZipWrite(output_zip, os.path.join(vendor_tempdir, img_file_path), img_file_path)
-      common.ZipWrite(output_zip, os.path.join(vendor_tempdir, map_file_path), map_file_path)
-    # copy recovery patch & install.sh
+      if os.path.exists(os.path.join(vendor_tempdir, map_file_path)):
+        common.ZipWrite(output_zip, os.path.join(vendor_tempdir, map_file_path), map_file_path)
+    # copy recovery.img, boot.img, recovery patch & install.sh
     if OPTIONS.rebuild_recovery:
+      recovery_img = "IMAGES/recovery.img"
+      boot_img = "IMAGES/boot.img"
+      common.ZipWrite(output_zip, os.path.join(vendor_tempdir, recovery_img), recovery_img)
+      common.ZipWrite(output_zip, os.path.join(vendor_tempdir, boot_img), boot_img)
       recovery_patch_path = "VENDOR/recovery-from-boot.p"
       recovery_sh_path = "VENDOR/bin/install-recovery.sh"
       common.ZipWrite(output_zip, os.path.join(vendor_tempdir, recovery_patch_path), recovery_patch_path)
@@ -1433,8 +1311,9 @@
       for n in names:
         OPTIONS.extra_apks[n] = key
     elif o == "--extra_apex_payload_key":
-      apex_name, key = a.split("=")
-      OPTIONS.extra_apex_payload_keys[apex_name] = key
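+      # One key may be shared by several APEXes, e.g.
+      # "com.android.foo,com.android.bar=payload.key".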
+      apex_names, key = a.split("=")
+      for name in apex_names.split(","):
+        OPTIONS.extra_apex_payload_keys[name] = key
     elif o == "--skip_apks_with_path_prefix":
       # Check the prefix, which must be in all upper case.
       prefix = a.split('/')[0]
@@ -1456,11 +1335,14 @@
         new.append(i[0] + i[1:].strip())
       OPTIONS.tag_changes = tuple(new)
     elif o == "--replace_verity_public_key":
-      OPTIONS.replace_verity_public_key = (True, a)
+      raise ValueError("--replace_verity_public_key is no longer supported,"
+                       " please switch to AVB")
     elif o == "--replace_verity_private_key":
-      OPTIONS.replace_verity_private_key = (True, a)
+      raise ValueError("--replace_verity_private_key is no longer supported,"
+                       " please switch to AVB")
     elif o == "--replace_verity_keyid":
-      OPTIONS.replace_verity_keyid = (True, a)
+      raise ValueError("--replace_verity_keyid is no longer supported, please"
+                       " switch to AVB")
     elif o == "--remove_avb_public_keys":
       OPTIONS.remove_avb_public_keys = a.split(",")
     elif o == "--avb_vbmeta_key":
@@ -1481,6 +1363,12 @@
       OPTIONS.avb_algorithms['dtbo'] = a
     elif o == "--avb_dtbo_extra_args":
       OPTIONS.avb_extra_args['dtbo'] = a
+    elif o == "--avb_recovery_key":
+      OPTIONS.avb_keys['recovery'] = a
+    elif o == "--avb_recovery_algorithm":
+      OPTIONS.avb_algorithms['recovery'] = a
+    elif o == "--avb_recovery_extra_args":
+      OPTIONS.avb_extra_args['recovery'] = a
     elif o == "--avb_system_key":
       OPTIONS.avb_keys['system'] = a
     elif o == "--avb_system_algorithm":
@@ -1535,14 +1423,12 @@
       OPTIONS.vendor_otatools = a
     elif o == "--vendor_partitions":
       OPTIONS.vendor_partitions = set(a.split(","))
-    elif o == '--sepolicy_key':
-      OPTIONS.sepolicy_key = a
-    elif o == '--sepolicy_cert':
-      OPTIONS.sepolicy_cert = a
-    elif o == '--fsverity_tool':
-      OPTIONS.fsverity_tool = a
     elif o == "--allow_gsi_debug_sepolicy":
       OPTIONS.allow_gsi_debug_sepolicy = True
+    elif o == "--override_apk_keys":
+      OPTIONS.override_apk_keys = a
+    elif o == "--override_apex_keys":
+      OPTIONS.override_apex_keys = a
     else:
       return False
     return True
@@ -1572,6 +1458,9 @@
           "avb_dtbo_algorithm=",
           "avb_dtbo_key=",
           "avb_dtbo_extra_args=",
+          "avb_recovery_algorithm=",
+          "avb_recovery_key=",
+          "avb_recovery_extra_args=",
           "avb_system_algorithm=",
           "avb_system_key=",
           "avb_system_extra_args=",
@@ -1595,10 +1484,9 @@
           "gki_signing_extra_args=",
           "vendor_partitions=",
           "vendor_otatools=",
-          "sepolicy_key=",
-          "sepolicy_cert=",
-          "fsverity_tool=",
           "allow_gsi_debug_sepolicy",
+          "override_apk_keys=",
+          "override_apex_keys=",
       ],
       extra_option_handler=option_handler)
 
@@ -1620,9 +1508,8 @@
   apk_keys_info, compressed_extension = common.ReadApkCerts(input_zip)
   apk_keys = GetApkCerts(apk_keys_info)
 
-  apex_keys_info, sepolicy_keys_info = ReadApexKeysInfo(input_zip)
+  apex_keys_info = ReadApexKeysInfo(input_zip)
   apex_keys = GetApexKeys(apex_keys_info, apk_keys)
-  sepolicy_keys = GetSepolicyKeys(sepolicy_keys_info)
 
   # TODO(xunchang) check for the apks inside the apex files, and abort early if
   # the keys are not available.
@@ -1640,10 +1527,10 @@
   ProcessTargetFiles(input_zip, output_zip, misc_info,
                      apk_keys, apex_keys, key_passwords,
                      platform_api_level, codename_to_api_level_map,
-                     compressed_extension, sepolicy_keys)
+                     compressed_extension)
 
-  common.ZipClose(input_zip)
-  common.ZipClose(output_zip)
+  input_zip.close()
+  output_zip.close()
 
   if OPTIONS.vendor_partitions and OPTIONS.vendor_otatools:
     BuildVendorPartitions(args[1])
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index 524c0f2..a2f7e9e 100644
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -41,8 +41,7 @@
   """
 
   def __init__(self, simg_fn, file_map_fn=None, clobbered_blocks=None,
-               mode="rb", build_map=True, allow_shared_blocks=False,
-               hashtree_info_generator=None):
+               mode="rb", build_map=True, allow_shared_blocks=False):
     self.simg_f = f = open(simg_fn, mode)
 
     header_bin = f.read(28)
@@ -74,8 +73,6 @@
         blk_sz, total_chunks)
 
     if not build_map:
-      assert not hashtree_info_generator, \
-        "Cannot generate the hashtree info without building the offset map."
       return
 
     pos = 0   # in blocks
@@ -83,7 +80,7 @@
     self.offset_map = offset_map = []
     self.clobbered_blocks = rangelib.RangeSet(data=clobbered_blocks)
 
-    for i in range(total_chunks):
+    for _ in range(total_chunks):
       header_bin = f.read(12)
       header = struct.unpack("<2H2I", header_bin)
       chunk_type = header[0]
@@ -114,16 +111,6 @@
         if data_sz != 0:
           raise ValueError("Don't care chunk input size is non-zero (%u)" %
                            (data_sz))
-        # Fills the don't care data ranges with zeros.
-        # TODO(xunchang) pass the care_map to hashtree info generator.
-        if hashtree_info_generator:
-          fill_data = '\x00' * 4
-          # In order to compute verity hashtree on device, we need to write
-          # zeros explicitly to the don't care ranges. Because these ranges may
-          # contain non-zero data from the previous build.
-          care_data.append(pos)
-          care_data.append(pos + chunk_sz)
-          offset_map.append((pos, chunk_sz, None, fill_data))
 
         pos += chunk_sz
 
@@ -150,10 +137,6 @@
     extended = extended.intersect(all_blocks).subtract(self.care_map)
     self.extended = extended
 
-    self.hashtree_info = None
-    if hashtree_info_generator:
-      self.hashtree_info = hashtree_info_generator.Generate(self)
-
     if file_map_fn:
       self.LoadFileBlockMap(file_map_fn, self.clobbered_blocks,
                             allow_shared_blocks)
@@ -183,6 +166,11 @@
   def ReadRangeSet(self, ranges):
     return [d for d in self._GetRangeData(ranges)]
 
+  def ReadBlocks(self, start=0, num_blocks=None):
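+    """Returns the data of the given block range (default: whole image)."""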
+    if num_blocks is None:
+      num_blocks = self.total_blocks
+    return self._GetRangeData([(start, start + num_blocks)])
+
   def TotalSha1(self, include_clobbered_blocks=False):
     """Return the SHA-1 hash of all data in the 'care' regions.
 
@@ -286,8 +274,6 @@
         remaining = remaining.subtract(ranges)
 
     remaining = remaining.subtract(clobbered_blocks)
-    if self.hashtree_info:
-      remaining = remaining.subtract(self.hashtree_info.hashtree_range)
 
     # For all the remaining blocks in the care_map (ie, those that
     # aren't part of the data for any file nor part of the clobbered_blocks),
@@ -350,8 +336,6 @@
         out["__NONZERO-%d" % i] = rangelib.RangeSet(data=blocks)
     if clobbered_blocks:
       out["__COPY"] = clobbered_blocks
-    if self.hashtree_info:
-      out["__HASHTREE"] = self.hashtree_info.hashtree_range
 
   def ResetFileMap(self):
     """Throw away the file map and treat the entire image as
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
index a5850d3..7b5476d 100644
--- a/tools/releasetools/test_add_img_to_target_files.py
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -16,15 +16,16 @@
 
 import os
 import os.path
+import tempfile
 import zipfile
 
 import common
 import test_utils
 from add_img_to_target_files import (
     AddPackRadioImages,
+    AddCareMapForAbOta, GetCareMap,
     CheckAbOtaImages)
 from rangelib import RangeSet
-from common import AddCareMapForAbOta, GetCareMap
 
 
 OPTIONS = common.OPTIONS
@@ -124,9 +125,6 @@
   def _test_AddCareMapForAbOta():
     """Helper function to set up the test for test_AddCareMapForAbOta()."""
     OPTIONS.info_dict = {
-        'extfs_sparse_flag' : '-s',
-        'system_image_size' : 65536,
-        'vendor_image_size' : 40960,
         'system_verity_block_device': '/dev/block/system',
         'vendor_verity_block_device': '/dev/block/vendor',
         'system.build.prop': common.PartitionBuildProps.FromDictionary(
@@ -149,13 +147,13 @@
     system_image = test_utils.construct_sparse_image([
         (0xCAC1, 6),
         (0xCAC3, 4),
-        (0xCAC1, 8)])
+        (0xCAC1, 6)], "system")
     vendor_image = test_utils.construct_sparse_image([
-        (0xCAC2, 12)])
+        (0xCAC2, 10)], "vendor")
 
     image_paths = {
-        'system' : system_image,
-        'vendor' : vendor_image,
+        'system': system_image,
+        'vendor': vendor_image,
     }
     return image_paths
 
@@ -210,9 +208,6 @@
     """Tests the case for device using AVB."""
     image_paths = self._test_AddCareMapForAbOta()
     OPTIONS.info_dict = {
-        'extfs_sparse_flag': '-s',
-        'system_image_size': 65536,
-        'vendor_image_size': 40960,
         'avb_system_hashtree_enable': 'true',
         'avb_vendor_hashtree_enable': 'true',
         'system.build.prop': common.PartitionBuildProps.FromDictionary(
@@ -244,9 +239,6 @@
     """Tests the case for partitions without fingerprint."""
     image_paths = self._test_AddCareMapForAbOta()
     OPTIONS.info_dict = {
-        'extfs_sparse_flag' : '-s',
-        'system_image_size' : 65536,
-        'vendor_image_size' : 40960,
         'system_verity_block_device': '/dev/block/system',
         'vendor_verity_block_device': '/dev/block/vendor',
     }
@@ -255,8 +247,9 @@
     AddCareMapForAbOta(care_map_file, ['system', 'vendor'], image_paths)
 
     expected = ['system', RangeSet("0-5 10-15").to_string_raw(), "unknown",
-                "unknown", 'vendor', RangeSet("0-9").to_string_raw(), "unknown",
-                "unknown"]
+                "unknown", 'vendor', RangeSet(
+        "0-9").to_string_raw(), "unknown",
+        "unknown"]
 
     self._verifyCareMap(expected, care_map_file)
 
@@ -265,9 +258,6 @@
     """Tests the case for partitions with thumbprint."""
     image_paths = self._test_AddCareMapForAbOta()
     OPTIONS.info_dict = {
-        'extfs_sparse_flag': '-s',
-        'system_image_size': 65536,
-        'vendor_image_size': 40960,
         'system_verity_block_device': '/dev/block/system',
         'vendor_verity_block_device': '/dev/block/vendor',
         'system.build.prop': common.PartitionBuildProps.FromDictionary(
@@ -297,9 +287,7 @@
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_AddCareMapForAbOta_skipPartition(self):
     image_paths = self._test_AddCareMapForAbOta()
-
-    # Remove vendor_image_size to invalidate the care_map for vendor.img.
-    del OPTIONS.info_dict['vendor_image_size']
+    test_utils.erase_avb_footer(image_paths["vendor"])
 
     care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
     AddCareMapForAbOta(care_map_file, ['system', 'vendor'], image_paths)
@@ -313,10 +301,8 @@
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_AddCareMapForAbOta_skipAllPartitions(self):
     image_paths = self._test_AddCareMapForAbOta()
-
-    # Remove the image_size properties for all the partitions.
-    del OPTIONS.info_dict['system_image_size']
-    del OPTIONS.info_dict['vendor_image_size']
+    test_utils.erase_avb_footer(image_paths["system"])
+    test_utils.erase_avb_footer(image_paths["vendor"])
 
     care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.pb')
     AddCareMapForAbOta(care_map_file, ['system', 'vendor'], image_paths)
@@ -395,35 +381,18 @@
     sparse_image = test_utils.construct_sparse_image([
         (0xCAC1, 6),
         (0xCAC3, 4),
-        (0xCAC1, 6)])
-    OPTIONS.info_dict = {
-        'extfs_sparse_flag' : '-s',
-        'system_image_size' : 53248,
-    }
+        (0xCAC1, 6)], "system")
     name, care_map = GetCareMap('system', sparse_image)
     self.assertEqual('system', name)
-    self.assertEqual(RangeSet("0-5 10-12").to_string_raw(), care_map)
+    self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), care_map)
 
   def test_GetCareMap_invalidPartition(self):
     self.assertRaises(AssertionError, GetCareMap, 'oem', None)
 
-  def test_GetCareMap_invalidAdjustedPartitionSize(self):
-    sparse_image = test_utils.construct_sparse_image([
-        (0xCAC1, 6),
-        (0xCAC3, 4),
-        (0xCAC1, 6)])
-    OPTIONS.info_dict = {
-        'extfs_sparse_flag' : '-s',
-        'system_image_size' : -45056,
-    }
-    self.assertRaises(AssertionError, GetCareMap, 'system', sparse_image)
-
   def test_GetCareMap_nonSparseImage(self):
-    OPTIONS.info_dict = {
-        'system_image_size' : 53248,
-    }
-    # 'foo' is the image filename, which is expected to be not used by
-    # GetCareMap().
-    name, care_map = GetCareMap('system', 'foo')
-    self.assertEqual('system', name)
-    self.assertEqual(RangeSet("0-12").to_string_raw(), care_map)
+    with tempfile.NamedTemporaryFile() as tmpfile:
+      tmpfile.truncate(4096 * 13)
+      test_utils.append_avb_footer(tmpfile.name, "system")
+      name, care_map = GetCareMap('system', tmpfile.name)
+      self.assertEqual('system', name)
+      self.assertEqual(RangeSet("0-12").to_string_raw(), care_map)
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index f973263..8c9655ad0 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -222,17 +222,17 @@
     info_dict = copy.deepcopy(self.TEST_INFO_FINGERPRINT_DICT)
     build_info = common.BuildInfo(info_dict)
     self.assertEqual(
-      'product-brand/product-name/product-device:version-release/build-id/'
-      'version-incremental:build-type/build-tags', build_info.fingerprint)
+        'product-brand/product-name/product-device:version-release/build-id/'
+        'version-incremental:build-type/build-tags', build_info.fingerprint)
 
     build_props = info_dict['build.prop'].build_props
     del build_props['ro.build.id']
     build_props['ro.build.legacy.id'] = 'legacy-build-id'
     build_info = common.BuildInfo(info_dict, use_legacy_id=True)
     self.assertEqual(
-      'product-brand/product-name/product-device:version-release/'
-      'legacy-build-id/version-incremental:build-type/build-tags',
-      build_info.fingerprint)
+        'product-brand/product-name/product-device:version-release/'
+        'legacy-build-id/version-incremental:build-type/build-tags',
+        build_info.fingerprint)
 
     self.assertRaises(common.ExternalError, common.BuildInfo, info_dict, None,
                       False)
@@ -241,9 +241,9 @@
     info_dict['vbmeta_digest'] = 'abcde12345'
     build_info = common.BuildInfo(info_dict, use_legacy_id=False)
     self.assertEqual(
-      'product-brand/product-name/product-device:version-release/'
-      'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
-      build_info.fingerprint)
+        'product-brand/product-name/product-device:version-release/'
+        'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
+        build_info.fingerprint)
 
   def test___getitem__(self):
     target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
@@ -376,7 +376,7 @@
     info_dict['build.prop'].build_props[
         'ro.product.property_source_order'] = 'bad-source'
     with self.assertRaisesRegexp(common.ExternalError,
-        'Invalid ro.product.property_source_order'):
+                                 'Invalid ro.product.property_source_order'):
       info = common.BuildInfo(info_dict, None)
       info.GetBuildProp('ro.product.device')
 
@@ -459,7 +459,7 @@
       time.sleep(5)  # Make sure the atime/mtime will change measurably.
 
       common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
-      common.ZipClose(zip_file)
+      zip_file.close()
 
       self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
                    test_file_name, expected_stat, expected_mode,
@@ -494,7 +494,7 @@
         expected_mode = extra_args.get("perms", zinfo_perms)
 
       common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
-      common.ZipClose(zip_file)
+      zip_file.close()
 
       self._verify(zip_file, zip_file_name, arcname, sha1(contents).hexdigest(),
                    expected_mode=expected_mode,
@@ -536,7 +536,7 @@
 
       common.ZipWrite(zip_file, test_file_name, **extra_args)
       common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
-      common.ZipClose(zip_file)
+      zip_file.close()
 
       # Verify the contents written by ZipWrite().
       self._verify(zip_file, zip_file_name, arcname_large,
@@ -551,12 +551,6 @@
       os.remove(zip_file_name)
       os.remove(test_file_name)
 
-  def _test_reset_ZIP64_LIMIT(self, func, *args):
-    default_limit = (1 << 31) - 1
-    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
-    func(*args)
-    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
-
   def test_ZipWrite(self):
     file_contents = os.urandom(1024)
     self._test_ZipWrite(file_contents)
@@ -581,7 +575,7 @@
     })
 
   def test_ZipWrite_resets_ZIP64_LIMIT(self):
-    self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")
+    self._test_ZipWrite("")
 
   def test_ZipWriteStr(self):
     random_string = os.urandom(1024)
@@ -632,9 +626,9 @@
     })
 
   def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
-    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
+    self._test_ZipWriteStr('foo', b'')
     zinfo = zipfile.ZipInfo(filename="foo")
-    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, b'')
+    self._test_ZipWriteStr(zinfo, b'')
 
   def test_bug21309935(self):
     zip_file = tempfile.NamedTemporaryFile(delete=False)
@@ -656,7 +650,7 @@
       zinfo = zipfile.ZipInfo(filename="qux")
       zinfo.external_attr = 0o700 << 16
       common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
-      common.ZipClose(zip_file)
+      zip_file.close()
 
       self._verify(zip_file, zip_file_name, "foo",
                    sha1(random_string).hexdigest(),
@@ -683,7 +677,7 @@
       common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
       common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
       common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
-      common.ZipClose(output_zip)
+      output_zip.close()
     zip_file.close()
 
     try:
@@ -731,8 +725,8 @@
       common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
       common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
       common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
-      common.ZipClose(output_zip)
-    common.ZipClose(output_zip)
+      output_zip.close()
+    output_zip.close()
     return zip_file
 
   @test_utils.SkipIfExternalToolsUnavailable()
@@ -819,9 +813,9 @@
   )
 
   APKCERTS_CERTMAP1 = {
-      'RecoveryLocalizer.apk' : 'certs/devkey',
-      'Settings.apk' : 'build/make/target/product/security/platform',
-      'TV.apk' : 'PRESIGNED',
+      'RecoveryLocalizer.apk': 'certs/devkey',
+      'Settings.apk': 'build/make/target/product/security/platform',
+      'TV.apk': 'PRESIGNED',
   }
 
   APKCERTS_TXT2 = (
@@ -836,10 +830,10 @@
   )
 
   APKCERTS_CERTMAP2 = {
-      'Compressed1.apk' : 'certs/compressed1',
-      'Compressed2a.apk' : 'certs/compressed2',
-      'Compressed2b.apk' : 'certs/compressed2',
-      'Compressed3.apk' : 'certs/compressed3',
+      'Compressed1.apk': 'certs/compressed1',
+      'Compressed2a.apk': 'certs/compressed2',
+      'Compressed2b.apk': 'certs/compressed2',
+      'Compressed3.apk': 'certs/compressed3',
   }
 
   APKCERTS_TXT3 = (
@@ -848,7 +842,7 @@
   )
 
   APKCERTS_CERTMAP3 = {
-      'Compressed4.apk' : 'certs/compressed4',
+      'Compressed4.apk': 'certs/compressed4',
   }
 
   # Test parsing with no optional fields, both optional fields, and only the
@@ -865,9 +859,9 @@
   )
 
   APKCERTS_CERTMAP4 = {
-      'RecoveryLocalizer.apk' : 'certs/devkey',
-      'Settings.apk' : 'build/make/target/product/security/platform',
-      'TV.apk' : 'PRESIGNED',
+      'RecoveryLocalizer.apk': 'certs/devkey',
+      'Settings.apk': 'build/make/target/product/security/platform',
+      'TV.apk': 'PRESIGNED',
   }
 
   def setUp(self):
@@ -971,7 +965,7 @@
     extracted_from_privkey = common.ExtractAvbPublicKey('avbtool', privkey)
     extracted_from_pubkey = common.ExtractAvbPublicKey('avbtool', pubkey)
     with open(extracted_from_privkey, 'rb') as privkey_fp, \
-        open(extracted_from_pubkey, 'rb') as pubkey_fp:
+            open(extracted_from_pubkey, 'rb') as pubkey_fp:
       self.assertEqual(privkey_fp.read(), pubkey_fp.read())
 
   def test_ParseCertificate(self):
@@ -1235,7 +1229,8 @@
     self.assertEqual(
         '1-5 9-10',
         sparse_image.file_map['//system/file1'].extra['text_str'])
-    self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
+    self.assertTrue(
+        sparse_image.file_map['//system/file2'].extra['incomplete'])
     self.assertTrue(
         sparse_image.file_map['/system/app/file3'].extra['incomplete'])
 
@@ -1343,7 +1338,7 @@
       'recovery_api_version': 3,
       'fstab_version': 2,
       'system_root_image': 'true',
-      'no_recovery' : 'true',
+      'no_recovery': 'true',
       'recovery_as_boot': 'true',
   }
 
@@ -1664,6 +1659,7 @@
     self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
                       test_file.name, 'generic_kernel')
 
+
 class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
   """Checks the format of install-recovery.sh.
 
@@ -1673,7 +1669,7 @@
   def setUp(self):
     self._tempdir = common.MakeTempDir()
     # Create a fake dict that contains the fstab info for boot&recovery.
-    self._info = {"fstab" : {}}
+    self._info = {"fstab": {}}
     fake_fstab = [
         "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
         "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
@@ -2020,11 +2016,11 @@
           input_zip, 'odm', placeholder_values)
 
     self.assertEqual({
-      'ro.odm.build.date.utc': '1578430045',
-      'ro.odm.build.fingerprint':
-      'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
-      'ro.product.odm.device': 'coral',
-      'ro.product.odm.name': 'product1',
+        'ro.odm.build.date.utc': '1578430045',
+        'ro.odm.build.fingerprint':
+        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+        'ro.product.odm.device': 'coral',
+        'ro.product.odm.name': 'product1',
     }, partition_props.build_props)
 
     with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
@@ -2186,3 +2182,29 @@
       }
       self.assertRaises(ValueError, common.PartitionBuildProps.FromInputFile,
                         input_zip, 'odm', placeholder_values)
+
+  def test_partitionBuildProps_fromInputFile_deepcopy(self):
+    build_prop = [
+        'ro.odm.build.date.utc=1578430045',
+        'ro.odm.build.fingerprint='
+        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+        'ro.product.odm.device=coral',
+    ]
+    input_file = self._BuildZipFile({
+        'ODM/etc/build.prop': '\n'.join(build_prop),
+    })
+
+    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
+      placeholder_values = {
+          'ro.boot.product.device_name': ['std', 'pro']
+      }
+      partition_props = common.PartitionBuildProps.FromInputFile(
+          input_zip, 'odm', placeholder_values)
+
+    copied_props = copy.deepcopy(partition_props)
+    self.assertEqual({
+        'ro.odm.build.date.utc': '1578430045',
+        'ro.odm.build.fingerprint':
+        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+        'ro.product.odm.device': 'coral',
+    }, copied_props.build_props)
diff --git a/tools/releasetools/test_merge_ota.py b/tools/releasetools/test_merge_ota.py
new file mode 100644
index 0000000..4fa7c02
--- /dev/null
+++ b/tools/releasetools/test_merge_ota.py
@@ -0,0 +1,86 @@
+# Copyright (C) 2008 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+import os
+import tempfile
+import test_utils
+import merge_ota
+import update_payload
+from update_metadata_pb2 import DynamicPartitionGroup
+from update_metadata_pb2 import DynamicPartitionMetadata
+from test_utils import SkipIfExternalToolsUnavailable, ReleaseToolsTestCase
+
+
+class MergeOtaTest(ReleaseToolsTestCase):
+  def setUp(self) -> None:
+    self.testdata_dir = test_utils.get_testdata_dir()
+    return super().setUp()
+
+  @SkipIfExternalToolsUnavailable()
+  def test_MergeThreeOtas(self):
+    ota1 = os.path.join(self.testdata_dir, "tuna_vbmeta.zip")
+    ota2 = os.path.join(self.testdata_dir, "tuna_vbmeta_system.zip")
+    ota3 = os.path.join(self.testdata_dir, "tuna_vbmeta_vendor.zip")
+    payloads = [update_payload.Payload(ota) for ota in [ota1, ota2, ota3]]
+    with tempfile.NamedTemporaryFile() as output_file:
+      merge_ota.main(["merge_ota", "-v", ota1, ota2, ota3,
+                     "--output", output_file.name])
+      payload = update_payload.Payload(output_file.name)
+      partition_names = [
+          part.partition_name for part in payload.manifest.partitions]
+      self.assertEqual(partition_names, [
+                       "vbmeta", "vbmeta_system", "vbmeta_vendor"])
+      payload.CheckDataHash()
+      for i in range(3):
+        self.assertEqual(payload.manifest.partitions[i].old_partition_info,
+                         payloads[i].manifest.partitions[0].old_partition_info)
+        self.assertEqual(payload.manifest.partitions[i].new_partition_info,
+                         payloads[i].manifest.partitions[0].new_partition_info)
+
+  def test_MergeDAPSnapshotDisabled(self):
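+    # snapshot_enabled is effectively ANDed: one payload with snapshots
+    # disabled turns them off for the merged payload.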
+    dap1 = DynamicPartitionMetadata()
+    dap2 = DynamicPartitionMetadata()
+    merged_dap = DynamicPartitionMetadata()
+    dap1.snapshot_enabled = True
+    dap2.snapshot_enabled = False
+    merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap1)
+    merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap2)
+    self.assertFalse(merged_dap.snapshot_enabled)
+
+  def test_MergeDAPSnapshotEnabled(self):
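+    # Conversely, snapshots stay enabled only if every merged payload
+    # enables them.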
+    dap1 = DynamicPartitionMetadata()
+    dap2 = DynamicPartitionMetadata()
+    merged_dap = DynamicPartitionMetadata()
+    merged_dap.snapshot_enabled = True
+    dap1.snapshot_enabled = True
+    dap2.snapshot_enabled = True
+    merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap1)
+    merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap2)
+    self.assertTrue(merged_dap.snapshot_enabled)
+
+  def test_MergeDAPGroups(self):
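+    # Groups sharing a name are merged into one group whose partition
+    # lists are concatenated.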
+    dap1 = DynamicPartitionMetadata()
+    dap1.groups.append(DynamicPartitionGroup(
+        name="abc", partition_names=["a", "b", "c"]))
+    dap2 = DynamicPartitionMetadata()
+    dap2.groups.append(DynamicPartitionGroup(
+        name="abc", partition_names=["d", "e", "f"]))
+    merged_dap = DynamicPartitionMetadata()
+    merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap1)
+    merge_ota.MergeDynamicPartitionMetadata(merged_dap, dap2)
+    self.assertEqual(len(merged_dap.groups), 1)
+    self.assertEqual(merged_dap.groups[0].name, "abc")
+    self.assertEqual(merged_dap.groups[0].partition_names, [
+                     "a", "b", "c", "d", "e", "f"])
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index 11cfee1..ad0f7a8 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -17,6 +17,7 @@
 import copy
 import os
 import os.path
+import tempfile
 import zipfile
 
 import common
@@ -24,17 +25,18 @@
 import test_utils
 from ota_utils import (
     BuildLegacyOtaMetadata, CalculateRuntimeDevicesAndFingerprints,
-    ConstructOtaApexInfo, FinalizeMetadata, GetPackageMetadata, PropertyFiles)
+    ConstructOtaApexInfo, FinalizeMetadata, GetPackageMetadata, PropertyFiles,
+    AbOtaPropertyFiles, PayloadGenerator, StreamingPropertyFiles)
 from ota_from_target_files import (
-    _LoadOemDicts, AbOtaPropertyFiles,
+    _LoadOemDicts,
     GetTargetFilesZipForCustomImagesUpdates,
     GetTargetFilesZipForPartialUpdates,
     GetTargetFilesZipForSecondaryImages,
     GetTargetFilesZipWithoutPostinstallConfig,
-    Payload, PayloadSigner, POSTINSTALL_CONFIG,
-    StreamingPropertyFiles, AB_PARTITIONS)
+    POSTINSTALL_CONFIG, AB_PARTITIONS)
 from apex_utils import GetApexInfoFromTargetFiles
 from test_utils import PropertyFilesTestCase
+from common import OPTIONS
+from payload_signer import PayloadSigner
 
 
 def construct_target_files(secondary=False, compressedApex=False):
@@ -973,7 +975,7 @@
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_GetPayloadMetadataOffsetAndSize(self):
     target_file = construct_target_files()
-    payload = Payload()
+    payload = PayloadGenerator()
     payload.Generate(target_file)
 
     payload_signer = PayloadSigner()
@@ -1028,7 +1030,7 @@
         0, proc.returncode,
         'Failed to run brillo_update_payload:\n{}'.format(stdoutdata))
 
-    signed_metadata_sig_file = payload_signer.Sign(metadata_sig_file)
+    signed_metadata_sig_file = payload_signer.SignHashFile(metadata_sig_file)
 
     # Finally we can compare the two signatures.
     with open(signed_metadata_sig_file, 'rb') as verify_fp:
@@ -1038,7 +1040,7 @@
   def construct_zip_package_withValidPayload(with_metadata=False):
     # Cannot use construct_zip_package() since we need a "valid" payload.bin.
     target_file = construct_target_files()
-    payload = Payload()
+    payload = PayloadGenerator()
     payload.Generate(target_file)
 
     payload_signer = PayloadSigner()
@@ -1142,10 +1144,10 @@
     self.assertEqual('openssl', payload_signer.signer)
 
   def test_init_withExternalSigner(self):
-    common.OPTIONS.payload_signer = 'abc'
     common.OPTIONS.payload_signer_args = ['arg1', 'arg2']
     common.OPTIONS.payload_signer_maximum_signature_size = '512'
-    payload_signer = PayloadSigner()
+    payload_signer = PayloadSigner(
+        OPTIONS.package_key, OPTIONS.private_key_suffix, payload_signer='abc')
     self.assertEqual('abc', payload_signer.signer)
     self.assertEqual(['arg1', 'arg2'], payload_signer.signer_args)
     self.assertEqual(512, payload_signer.maximum_signature_size)
@@ -1168,35 +1170,36 @@
   def test_Sign(self):
     payload_signer = PayloadSigner()
     input_file = os.path.join(self.testdata_dir, self.SIGFILE)
-    signed_file = payload_signer.Sign(input_file)
+    signed_file = payload_signer.SignHashFile(input_file)
 
     verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
     self._assertFilesEqual(verify_file, signed_file)
 
   def test_Sign_withExternalSigner_openssl(self):
     """Uses openssl as the external payload signer."""
-    common.OPTIONS.payload_signer = 'openssl'
     common.OPTIONS.payload_signer_args = [
         'pkeyutl', '-sign', '-keyform', 'DER', '-inkey',
         os.path.join(self.testdata_dir, 'testkey.pk8'),
         '-pkeyopt', 'digest:sha256']
-    payload_signer = PayloadSigner()
+    payload_signer = PayloadSigner(
+        OPTIONS.package_key, OPTIONS.private_key_suffix, payload_signer="openssl")
     input_file = os.path.join(self.testdata_dir, self.SIGFILE)
-    signed_file = payload_signer.Sign(input_file)
+    signed_file = payload_signer.SignHashFile(input_file)
 
     verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
     self._assertFilesEqual(verify_file, signed_file)
 
   def test_Sign_withExternalSigner_script(self):
     """Uses testdata/payload_signer.sh as the external payload signer."""
-    common.OPTIONS.payload_signer = os.path.join(
+    external_signer = os.path.join(
         self.testdata_dir, 'payload_signer.sh')
-    os.chmod(common.OPTIONS.payload_signer, 0o700)
+    os.chmod(external_signer, 0o700)
     common.OPTIONS.payload_signer_args = [
         os.path.join(self.testdata_dir, 'testkey.pk8')]
-    payload_signer = PayloadSigner()
+    payload_signer = PayloadSigner(
+        OPTIONS.package_key, OPTIONS.private_key_suffix, payload_signer=external_signer)
     input_file = os.path.join(self.testdata_dir, self.SIGFILE)
-    signed_file = payload_signer.Sign(input_file)
+    signed_file = payload_signer.SignHashFile(input_file)
 
     verify_file = os.path.join(self.testdata_dir, self.SIGNED_SIGFILE)
     self._assertFilesEqual(verify_file, signed_file)
@@ -1219,7 +1222,7 @@
   @staticmethod
   def _create_payload_full(secondary=False):
     target_file = construct_target_files(secondary)
-    payload = Payload(secondary)
+    payload = PayloadGenerator(secondary, OPTIONS.wipe_user_data)
     payload.Generate(target_file)
     return payload
 
@@ -1227,7 +1230,7 @@
   def _create_payload_incremental():
     target_file = construct_target_files()
     source_file = construct_target_files()
-    payload = Payload()
+    payload = PayloadGenerator()
     payload.Generate(target_file, source_file)
     return payload
 
@@ -1245,7 +1248,7 @@
   def test_Generate_additionalArgs(self):
     target_file = construct_target_files()
     source_file = construct_target_files()
-    payload = Payload()
+    payload = PayloadGenerator()
     # This should work the same as calling payload.Generate(target_file,
     # source_file).
     payload.Generate(
@@ -1256,7 +1259,7 @@
   def test_Generate_invalidInput(self):
     target_file = construct_target_files()
     common.ZipDelete(target_file, 'IMAGES/vendor.img')
-    payload = Payload()
+    payload = PayloadGenerator()
     self.assertRaises(common.ExternalError, payload.Generate, target_file)
 
   @test_utils.SkipIfExternalToolsUnavailable()
@@ -1292,6 +1295,9 @@
     common.OPTIONS.wipe_user_data = True
     payload = self._create_payload_full()
     payload.Sign(PayloadSigner())
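+    # Writing the payload out also generates payload_properties, which the
+    # assertion below reads.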
+    with tempfile.NamedTemporaryFile() as fp:
+      with zipfile.ZipFile(fp, "w") as zfp:
+        payload.WriteToZip(zfp)
 
     with open(payload.payload_properties) as properties_fp:
       self.assertIn("POWERWASH=1", properties_fp.read())
@@ -1300,6 +1306,9 @@
   def test_Sign_secondary(self):
     payload = self._create_payload_full(secondary=True)
     payload.Sign(PayloadSigner())
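+    # As above, WriteToZip must run before payload_properties exists.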
+    with tempfile.NamedTemporaryFile() as fp:
+      with zipfile.ZipFile(fp, "w") as zfp:
+        payload.WriteToZip(zfp)
 
     with open(payload.payload_properties) as properties_fp:
       self.assertIn("SWITCH_SLOT_ON_REBOOT=0", properties_fp.read())
@@ -1324,33 +1333,17 @@
     with zipfile.ZipFile(output_file) as verify_zip:
       # First make sure we have the essential entries.
       namelist = verify_zip.namelist()
-      self.assertIn(Payload.PAYLOAD_BIN, namelist)
-      self.assertIn(Payload.PAYLOAD_PROPERTIES_TXT, namelist)
+      self.assertIn(PayloadGenerator.PAYLOAD_BIN, namelist)
+      self.assertIn(PayloadGenerator.PAYLOAD_PROPERTIES_TXT, namelist)
 
       # Then assert these entries are stored.
       for entry_info in verify_zip.infolist():
-        if entry_info.filename not in (Payload.PAYLOAD_BIN,
-                                       Payload.PAYLOAD_PROPERTIES_TXT):
+        if entry_info.filename not in (PayloadGenerator.PAYLOAD_BIN,
+                                       PayloadGenerator.PAYLOAD_PROPERTIES_TXT):
           continue
         self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
 
   @test_utils.SkipIfExternalToolsUnavailable()
-  def test_WriteToZip_unsignedPayload(self):
-    """Unsigned payloads should not be allowed to be written to zip."""
-    payload = self._create_payload_full()
-
-    output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
-      self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
-
-    # Also test with incremental payload.
-    payload = self._create_payload_incremental()
-
-    output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
-      self.assertRaises(AssertionError, payload.WriteToZip, output_zip)
-
-  @test_utils.SkipIfExternalToolsUnavailable()
   def test_WriteToZip_secondary(self):
     payload = self._create_payload_full(secondary=True)
     payload.Sign(PayloadSigner())
@@ -1362,14 +1355,14 @@
     with zipfile.ZipFile(output_file) as verify_zip:
       # First make sure we have the essential entries.
       namelist = verify_zip.namelist()
-      self.assertIn(Payload.SECONDARY_PAYLOAD_BIN, namelist)
-      self.assertIn(Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT, namelist)
+      self.assertIn(PayloadGenerator.SECONDARY_PAYLOAD_BIN, namelist)
+      self.assertIn(PayloadGenerator.SECONDARY_PAYLOAD_PROPERTIES_TXT, namelist)
 
       # Then assert these entries are stored.
       for entry_info in verify_zip.infolist():
         if entry_info.filename not in (
-                Payload.SECONDARY_PAYLOAD_BIN,
-                Payload.SECONDARY_PAYLOAD_PROPERTIES_TXT):
+                PayloadGenerator.SECONDARY_PAYLOAD_BIN,
+                PayloadGenerator.SECONDARY_PAYLOAD_PROPERTIES_TXT):
           continue
         self.assertEqual(zipfile.ZIP_STORED, entry_info.compress_type)
 
diff --git a/tools/releasetools/test_sign_apex.py b/tools/releasetools/test_sign_apex.py
index c344e22..7723de7 100644
--- a/tools/releasetools/test_sign_apex.py
+++ b/tools/releasetools/test_sign_apex.py
@@ -59,6 +59,21 @@
     self.assertTrue(os.path.exists(signed_test_apex))
 
   @test_utils.SkipIfExternalToolsUnavailable()
+  def test_SignSepolicyApex(self):
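+    # The sepolicy APEX is now signed through the standard SignApexFile
+    # path, without dedicated sepolicy keys.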
+    test_apex = os.path.join(self.testdata_dir, 'sepolicy.apex')
+    payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
+    container_key = os.path.join(self.testdata_dir, 'testkey')
+    apk_keys = {'SEPolicy-33.zip': os.path.join(self.testdata_dir, 'testkey')}
+    signed_test_apex = sign_apex.SignApexFile(
+        'avbtool',
+        test_apex,
+        payload_key,
+        container_key,
+        False,
+        apk_keys=apk_keys)
+    self.assertTrue(os.path.exists(signed_test_apex))
+
+  @test_utils.SkipIfExternalToolsUnavailable()
   def test_SignCompressedApexFile(self):
     apex = os.path.join(test_utils.get_current_dir(), 'com.android.apex.compressed.v1.capex')
     payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
@@ -71,21 +86,3 @@
         False,
         codename_to_api_level_map={'S': 31, 'Tiramisu' : 32})
     self.assertTrue(os.path.exists(signed_apex))
-
-  @test_utils.SkipIfExternalToolsUnavailable()
-  def test_SignApexWithSepolicy(self):
-    test_apex = os.path.join(self.testdata_dir, 'sepolicy.apex')
-    payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
-    container_key = os.path.join(self.testdata_dir, 'testkey')
-    sepolicy_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
-    sepolicy_cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
-    signed_test_apex = sign_apex.SignApexFile(
-        'avbtool',
-        test_apex,
-        payload_key,
-        container_key,
-        False,
-        None,
-        sepolicy_key=sepolicy_key,
-        sepolicy_cert=sepolicy_cert)
-    self.assertTrue(os.path.exists(signed_test_apex))
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 144a3cd..0cd7dac 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -23,8 +23,8 @@
 import test_utils
 from sign_target_files_apks import (
     CheckApkAndApexKeysAvailable, EditTags, GetApkFileInfo, ReadApexKeysInfo,
-    ReplaceCerts, ReplaceGkiSigningKey, ReplaceVerityKeyId, RewriteAvbProps,
-    RewriteProps, WriteOtacerts)
+    ReplaceCerts, ReplaceGkiSigningKey, RewriteAvbProps, RewriteProps,
+    WriteOtacerts)
 
 
 class SignTargetFilesApksTest(test_utils.ReleaseToolsTestCase):
@@ -154,64 +154,6 @@
         '\n'.join([prop[1] for prop in props]) + '\n',
         RewriteProps('\n'.join([prop[0] for prop in props])))
 
-  def test_ReplaceVerityKeyId(self):
-    BOOT_CMDLINE1 = (
-        "console=ttyHSL0,115200,n8 androidboot.console=ttyHSL0 "
-        "androidboot.hardware=marlin user_debug=31 ehci-hcd.park=3 "
-        "lpm_levels.sleep_disabled=1 cma=32M@0-0xffffffff loop.max_part=7 "
-        "buildvariant=userdebug "
-        "veritykeyid=id:7e4333f9bba00adfe0ede979e28ed1920492b40f\n")
-
-    BOOT_CMDLINE2 = (
-        "console=ttyHSL0,115200,n8 androidboot.console=ttyHSL0 "
-        "androidboot.hardware=marlin user_debug=31 ehci-hcd.park=3 "
-        "lpm_levels.sleep_disabled=1 cma=32M@0-0xffffffff loop.max_part=7 "
-        "buildvariant=userdebug "
-        "veritykeyid=id:d24f2590e9abab5cff5f59da4c4f0366e3f43e94\n")
-
-    input_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
-      input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE1)
-
-    # Test with the first certificate.
-    cert_file = os.path.join(self.testdata_dir, 'verity.x509.pem')
-
-    output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
-         zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
-      ReplaceVerityKeyId(input_zip, output_zip, cert_file)
-
-    with zipfile.ZipFile(output_file) as output_zip:
-      self.assertEqual(BOOT_CMDLINE1, output_zip.read('BOOT/cmdline').decode())
-
-    # Test with the second certificate.
-    cert_file = os.path.join(self.testdata_dir, 'testkey.x509.pem')
-
-    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
-         zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
-      ReplaceVerityKeyId(input_zip, output_zip, cert_file)
-
-    with zipfile.ZipFile(output_file) as output_zip:
-      self.assertEqual(BOOT_CMDLINE2, output_zip.read('BOOT/cmdline').decode())
-
-  def test_ReplaceVerityKeyId_no_veritykeyid(self):
-    BOOT_CMDLINE = (
-        "console=ttyHSL0,115200,n8 androidboot.hardware=bullhead boot_cpus=0-5 "
-        "lpm_levels.sleep_disabled=1 msm_poweroff.download_mode=0 "
-        "loop.max_part=7\n")
-
-    input_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(input_file, 'w', allowZip64=True) as input_zip:
-      input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE)
-
-    output_file = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip, \
-         zipfile.ZipFile(output_file, 'w', allowZip64=True) as output_zip:
-      ReplaceVerityKeyId(input_zip, output_zip, None)
-
-    with zipfile.ZipFile(output_file) as output_zip:
-      self.assertEqual(BOOT_CMDLINE, output_zip.read('BOOT/cmdline').decode())
-
   def test_ReplaceCerts(self):
     cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
     with open(cert1_path) as cert1_fp:
@@ -476,7 +418,7 @@
       target_files_zip.writestr('META/apexkeys.txt', self.APEX_KEYS_TXT)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -486,7 +428,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_mismatchingContainerKeys(self):
     # Mismatching payload public / private keys.
@@ -516,7 +457,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -526,7 +467,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_missingPayloadPublicKey(self):
     # Invalid lines will be skipped.
@@ -540,7 +480,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -550,7 +490,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_presignedKeys(self):
     apex_keys = self.APEX_KEYS_TXT + (
@@ -564,7 +503,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -574,7 +513,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
 
   def test_ReadApexKeysInfo_presignedKeys(self):
     apex_keys = self.APEX_KEYS_TXT + (
@@ -588,7 +526,7 @@
       target_files_zip.writestr('META/apexkeys.txt', apex_keys)
 
     with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
+      keys_info = ReadApexKeysInfo(target_files_zip)
 
     self.assertEqual({
         'apex.apexd_test.apex': (
@@ -598,72 +536,6 @@
             'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
             'build/make/target/product/security/testkey', None),
         }, keys_info)
-    self.assertEqual({}, sepolicy_keys_info)
-
-  def test_ReadApexKeysInfo_withSepolicyKeys(self):
-    apex_keys = self.APEX_KEYS_TXT + (
-        'name="sepolicy.apex" '
-        'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
-        'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
-        'container_certificate="build/make/target/product/security/testkey.x509.pem" '
-        'container_private_key="build/make/target/product/security/testkey.pk8" '
-        'sepolicy_key="build/make/target/product/security/testkey.key" '
-        'sepolicy_certificate="build/make/target/product/security/testkey.x509.pem" '
-        'fsverity_tool="fsverity"')
-    target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
-      target_files_zip.writestr('META/apexkeys.txt', apex_keys)
-
-    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
-
-    self.assertEqual({
-        'apex.apexd_test.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
-            'build/make/target/product/security/testkey', None),
-        'apex.apexd_test_different_app.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        'sepolicy.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        }, keys_info)
-    self.assertEqual({'sepolicy.apex': (
-            'build/make/target/product/security/testkey.key',
-            'build/make/target/product/security/testkey.x509.pem',
-            'fsverity'),
-        }, sepolicy_keys_info)
-
-  def test_ReadApexKeysInfo_withSepolicyApex(self):
-    apex_keys = self.APEX_KEYS_TXT + (
-        'name="sepolicy.apex" '
-        'public_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.avbpubkey" '
-        'private_key="system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem" '
-        'container_certificate="build/make/target/product/security/testkey.x509.pem" '
-        'container_private_key="build/make/target/product/security/testkey.pk8" ')
-    target_files = common.MakeTempFile(suffix='.zip')
-    with zipfile.ZipFile(target_files, 'w', allowZip64=True) as target_files_zip:
-      target_files_zip.writestr('META/apexkeys.txt', apex_keys)
-
-    with zipfile.ZipFile(target_files, allowZip64=True) as target_files_zip:
-      keys_info, sepolicy_keys_info = ReadApexKeysInfo(target_files_zip)
-
-    self.assertEqual({
-        'apex.apexd_test.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package.pem',
-            'build/make/target/product/security/testkey', None),
-        'apex.apexd_test_different_app.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        'sepolicy.apex': (
-            'system/apex/apexd/apexd_testdata/com.android.apex.test_package_2.pem',
-            'build/make/target/product/security/testkey', None),
-        }, keys_info)
-    self.assertEqual({'sepolicy.apex': (
-            None,
-            None,
-            None),
-        }, sepolicy_keys_info)
 
   def test_ReplaceGkiSigningKey(self):
     common.OPTIONS.gki_signing_key = 'release_gki_key'
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index e30d2b9..5bbcf7f 100755
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -19,6 +19,7 @@
 Utils for running unittests.
 """
 
+import avbtool
 import logging
 import os
 import os.path
@@ -57,12 +58,14 @@
   current_dir = os.path.dirname(os.path.realpath(__file__))
   return os.path.join(current_dir, 'testdata')
 
+
 def get_current_dir():
   """Returns the current dir, relative to the script dir."""
   # The script dir is the one we want, which could be different from pwd.
   current_dir = os.path.dirname(os.path.realpath(__file__))
   return current_dir
 
+
 def get_search_path():
   """Returns the search path that has 'framework/signapk.jar' under."""
 
@@ -83,14 +86,33 @@
       # In relative to 'build/make/tools/releasetools' in the Android source.
       ['..'] * 4 + ['out', 'host', 'linux-x86'],
       # Or running the script unpacked from otatools.zip.
-      ['..']):
+          ['..']):
     full_path = os.path.realpath(os.path.join(current_dir, *path))
     if signapk_exists(full_path):
       return full_path
   return None
 
 
-def construct_sparse_image(chunks):
+def append_avb_footer(file_path: str, partition_name: str = ""):
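+  """Appends an AVB hashtree footer (without FEC) to file_path using avbtool."""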
+  avb = avbtool.AvbTool()
+  try:
+    args = ["avbtool", "add_hashtree_footer", "--image", file_path,
+            "--partition_name", partition_name, "--do_not_generate_fec"]
+    avb.run(args)
+  except SystemExit:
+    raise ValueError(f"Failed to append hashtree footer {args}")
+
+
+def erase_avb_footer(file_path: str):
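+  """Erases any AVB footer from file_path using avbtool."""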
+  avb = avbtool.AvbTool()
+  try:
+    args = ["avbtool", "erase_footer", "--image", file_path]
+    avb.run(args)
+  except SystemExit:
+    raise ValueError(f"Failed to erase hashtree footer {args}")
+
+
+def construct_sparse_image(chunks, partition_name: str = ""):
   """Returns a sparse image file constructed from the given chunks.
 
   From system/core/libsparse/sparse_format.h.
@@ -151,6 +173,7 @@
       if data_size != 0:
         fp.write(os.urandom(data_size))
 
+  append_avb_footer(sparse_image, partition_name)
   return sparse_image
 
 
@@ -201,6 +224,7 @@
   def tearDown(self):
     common.Cleanup()
 
+
 class PropertyFilesTestCase(ReleaseToolsTestCase):
 
   @staticmethod
diff --git a/tools/releasetools/test_verity_utils.py b/tools/releasetools/test_verity_utils.py
index e2a022a..4a0ff09 100644
--- a/tools/releasetools/test_verity_utils.py
+++ b/tools/releasetools/test_verity_utils.py
@@ -27,249 +27,11 @@
 from test_utils import (
     get_testdata_dir, ReleaseToolsTestCase, SkipIfExternalToolsUnavailable)
 from verity_utils import (
-    CalculateVbmetaDigest, CreateHashtreeInfoGenerator,
-    CreateVerityImageBuilder, HashtreeInfo,
-    VerifiedBootVersion1HashtreeInfoGenerator)
+    CalculateVbmetaDigest, CreateVerityImageBuilder)
 
 BLOCK_SIZE = common.BLOCK_SIZE
 
 
-class VerifiedBootVersion1HashtreeInfoGeneratorTest(ReleaseToolsTestCase):
-
-  def setUp(self):
-    self.testdata_dir = get_testdata_dir()
-
-    self.partition_size = 1024 * 1024
-    self.prop_dict = {
-        'verity': 'true',
-        'verity_fec': 'true',
-        'system_verity_block_device': '/dev/block/system',
-        'system_size': self.partition_size
-    }
-
-    self.hash_algorithm = "sha256"
-    self.fixed_salt = (
-        "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7")
-    self.expected_root_hash = (
-        "0b7c4565e87b1026e11fbab91c0bc29e185c847a5b44d40e6e86e461e8adf80d")
-
-  def _CreateSimg(self, raw_data):  # pylint: disable=no-self-use
-    output_file = common.MakeTempFile()
-    raw_image = common.MakeTempFile()
-    with open(raw_image, 'wb') as f:
-      f.write(raw_data)
-
-    cmd = ["img2simg", raw_image, output_file, '4096']
-    common.RunAndCheckOutput(cmd)
-    return output_file
-
-  def _GenerateImage(self):
-    partition_size = 1024 * 1024
-    prop_dict = {
-        'partition_size': str(partition_size),
-        'verity': 'true',
-        'verity_block_device': '/dev/block/system',
-        'verity_key': os.path.join(self.testdata_dir, 'testkey'),
-        'verity_fec': 'true',
-        'verity_signer_cmd': 'verity_signer',
-    }
-    verity_image_builder = CreateVerityImageBuilder(prop_dict)
-    self.assertIsNotNone(verity_image_builder)
-    adjusted_size = verity_image_builder.CalculateMaxImageSize()
-
-    raw_image = bytearray(adjusted_size)
-    for i in range(adjusted_size):
-      raw_image[i] = ord('0') + i % 10
-
-    output_file = self._CreateSimg(raw_image)
-
-    # Append the verity metadata.
-    verity_image_builder.Build(output_file)
-
-    return output_file
-
-  @SkipIfExternalToolsUnavailable()
-  def test_CreateHashtreeInfoGenerator(self):
-    image_file = sparse_img.SparseImage(self._GenerateImage())
-
-    generator = CreateHashtreeInfoGenerator(
-        'system', image_file, self.prop_dict)
-    self.assertEqual(
-        VerifiedBootVersion1HashtreeInfoGenerator, type(generator))
-    self.assertEqual(self.partition_size, generator.partition_size)
-    self.assertTrue(generator.fec_supported)
-
-  @SkipIfExternalToolsUnavailable()
-  def test_DecomposeSparseImage(self):
-    image_file = sparse_img.SparseImage(self._GenerateImage())
-
-    generator = VerifiedBootVersion1HashtreeInfoGenerator(
-        self.partition_size, 4096, True)
-    generator.DecomposeSparseImage(image_file)
-    self.assertEqual(991232, generator.filesystem_size)
-    self.assertEqual(12288, generator.hashtree_size)
-    self.assertEqual(32768, generator.metadata_size)
-
-  @SkipIfExternalToolsUnavailable()
-  def test_ParseHashtreeMetadata(self):
-    image_file = sparse_img.SparseImage(self._GenerateImage())
-    generator = VerifiedBootVersion1HashtreeInfoGenerator(
-        self.partition_size, 4096, True)
-    generator.DecomposeSparseImage(image_file)
-
-    # pylint: disable=protected-access
-    generator._ParseHashtreeMetadata()
-
-    self.assertEqual(
-        self.hash_algorithm, generator.hashtree_info.hash_algorithm)
-    self.assertEqual(self.fixed_salt, generator.hashtree_info.salt)
-    self.assertEqual(self.expected_root_hash, generator.hashtree_info.root_hash)
-
-  @SkipIfExternalToolsUnavailable()
-  def test_ValidateHashtree_smoke(self):
-    generator = VerifiedBootVersion1HashtreeInfoGenerator(
-        self.partition_size, 4096, True)
-    generator.image = sparse_img.SparseImage(self._GenerateImage())
-
-    generator.hashtree_info = info = HashtreeInfo()
-    info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
-    info.hashtree_range = RangeSet(
-        data=[991232 // 4096, (991232 + 12288) // 4096])
-    info.hash_algorithm = self.hash_algorithm
-    info.salt = self.fixed_salt
-    info.root_hash = self.expected_root_hash
-
-    self.assertTrue(generator.ValidateHashtree())
-
-  @SkipIfExternalToolsUnavailable()
-  def test_ValidateHashtree_failure(self):
-    generator = VerifiedBootVersion1HashtreeInfoGenerator(
-        self.partition_size, 4096, True)
-    generator.image = sparse_img.SparseImage(self._GenerateImage())
-
-    generator.hashtree_info = info = HashtreeInfo()
-    info.filesystem_range = RangeSet(data=[0, 991232 // 4096])
-    info.hashtree_range = RangeSet(
-        data=[991232 // 4096, (991232 + 12288) // 4096])
-    info.hash_algorithm = self.hash_algorithm
-    info.salt = self.fixed_salt
-    info.root_hash = "a" + self.expected_root_hash[1:]
-
-    self.assertFalse(generator.ValidateHashtree())
-
-  @SkipIfExternalToolsUnavailable()
-  def test_Generate(self):
-    image_file = sparse_img.SparseImage(self._GenerateImage())
-    generator = CreateHashtreeInfoGenerator('system', 4096, self.prop_dict)
-    info = generator.Generate(image_file)
-
-    self.assertEqual(RangeSet(data=[0, 991232 // 4096]), info.filesystem_range)
-    self.assertEqual(RangeSet(data=[991232 // 4096, (991232 + 12288) // 4096]),
-                     info.hashtree_range)
-    self.assertEqual(self.hash_algorithm, info.hash_algorithm)
-    self.assertEqual(self.fixed_salt, info.salt)
-    self.assertEqual(self.expected_root_hash, info.root_hash)
-
-
-class VerifiedBootVersion1VerityImageBuilderTest(ReleaseToolsTestCase):
-
-  DEFAULT_PARTITION_SIZE = 4096 * 1024
-  DEFAULT_PROP_DICT = {
-      'partition_size': str(DEFAULT_PARTITION_SIZE),
-      'verity': 'true',
-      'verity_block_device': '/dev/block/system',
-      'verity_key': os.path.join(get_testdata_dir(), 'testkey'),
-      'verity_fec': 'true',
-      'verity_signer_cmd': 'verity_signer',
-  }
-
-  def test_init(self):
-    prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
-    verity_image_builder = CreateVerityImageBuilder(prop_dict)
-    self.assertIsNotNone(verity_image_builder)
-    self.assertEqual(1, verity_image_builder.version)
-
-  def test_init_MissingProps(self):
-    prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
-    del prop_dict['verity']
-    self.assertIsNone(CreateVerityImageBuilder(prop_dict))
-
-    prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
-    del prop_dict['verity_block_device']
-    self.assertIsNone(CreateVerityImageBuilder(prop_dict))
-
-  @SkipIfExternalToolsUnavailable()
-  def test_CalculateMaxImageSize(self):
-    verity_image_builder = CreateVerityImageBuilder(self.DEFAULT_PROP_DICT)
-    size = verity_image_builder.CalculateMaxImageSize()
-    self.assertLess(size, self.DEFAULT_PARTITION_SIZE)
-
-    # Same result by explicitly passing the partition size.
-    self.assertEqual(
-        verity_image_builder.CalculateMaxImageSize(),
-        verity_image_builder.CalculateMaxImageSize(
-            self.DEFAULT_PARTITION_SIZE))
-
-  @staticmethod
-  def _BuildAndVerify(prop, verify_key):
-    verity_image_builder = CreateVerityImageBuilder(prop)
-    image_size = verity_image_builder.CalculateMaxImageSize()
-
-    # Build the sparse image with verity metadata.
-    input_dir = common.MakeTempDir()
-    image = common.MakeTempFile(suffix='.img')
-    cmd = ['mkuserimg_mke2fs', input_dir, image, 'ext4', '/system',
-           str(image_size), '-j', '0', '-s']
-    common.RunAndCheckOutput(cmd)
-    verity_image_builder.Build(image)
-
-    # Verify the verity metadata.
-    cmd = ['verity_verifier', image, '-mincrypt', verify_key]
-    common.RunAndCheckOutput(cmd)
-
-  @SkipIfExternalToolsUnavailable()
-  def test_Build(self):
-    self._BuildAndVerify(
-        self.DEFAULT_PROP_DICT,
-        os.path.join(get_testdata_dir(), 'testkey_mincrypt'))
-
-  @SkipIfExternalToolsUnavailable()
-  def test_Build_ValidationCheck(self):
-    # A validity check for the test itself: the image shouldn't be verifiable
-    # with wrong key.
-    self.assertRaises(
-        common.ExternalError,
-        self._BuildAndVerify,
-        self.DEFAULT_PROP_DICT,
-        os.path.join(get_testdata_dir(), 'verity_mincrypt'))
-
-  @SkipIfExternalToolsUnavailable()
-  def test_Build_FecDisabled(self):
-    prop_dict = copy.deepcopy(self.DEFAULT_PROP_DICT)
-    del prop_dict['verity_fec']
-    self._BuildAndVerify(
-        prop_dict,
-        os.path.join(get_testdata_dir(), 'testkey_mincrypt'))
-
-  @SkipIfExternalToolsUnavailable()
-  def test_Build_SquashFs(self):
-    verity_image_builder = CreateVerityImageBuilder(self.DEFAULT_PROP_DICT)
-    verity_image_builder.CalculateMaxImageSize()
-
-    # Build the sparse image with verity metadata.
-    input_dir = common.MakeTempDir()
-    image = common.MakeTempFile(suffix='.img')
-    cmd = ['mksquashfsimage.sh', input_dir, image, '-s']
-    common.RunAndCheckOutput(cmd)
-    verity_image_builder.PadSparseImage(image)
-    verity_image_builder.Build(image)
-
-    # Verify the verity metadata.
-    cmd = ["verity_verifier", image, '-mincrypt',
-           os.path.join(get_testdata_dir(), 'testkey_mincrypt')]
-    common.RunAndCheckOutput(cmd)
-
-
 class VerifiedBootVersion2VerityImageBuilderTest(ReleaseToolsTestCase):
 
   DEFAULT_PROP_DICT = {
diff --git a/tools/releasetools/testdata/sepolicy.apex b/tools/releasetools/testdata/sepolicy.apex
index f7d267d..2c646cd 100644
--- a/tools/releasetools/testdata/sepolicy.apex
+++ b/tools/releasetools/testdata/sepolicy.apex
Binary files differ
diff --git a/tools/releasetools/testdata/tuna_vbmeta.zip b/tools/releasetools/testdata/tuna_vbmeta.zip
new file mode 100644
index 0000000..64e7bb3
--- /dev/null
+++ b/tools/releasetools/testdata/tuna_vbmeta.zip
Binary files differ
diff --git a/tools/releasetools/testdata/tuna_vbmeta_system.zip b/tools/releasetools/testdata/tuna_vbmeta_system.zip
new file mode 100644
index 0000000..3d76ef0
--- /dev/null
+++ b/tools/releasetools/testdata/tuna_vbmeta_system.zip
Binary files differ
diff --git a/tools/releasetools/testdata/tuna_vbmeta_vendor.zip b/tools/releasetools/testdata/tuna_vbmeta_vendor.zip
new file mode 100644
index 0000000..6994c59
--- /dev/null
+++ b/tools/releasetools/testdata/tuna_vbmeta_vendor.zip
Binary files differ
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index d55ad88..755241d 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -49,107 +49,6 @@
     Exception.__init__(self, message)
 
 
-def GetVerityFECSize(image_size):
-  cmd = ["fec", "-s", str(image_size)]
-  output = common.RunAndCheckOutput(cmd, verbose=False)
-  return int(output)
-
-
-def GetVerityTreeSize(image_size):
-  cmd = ["build_verity_tree", "-s", str(image_size)]
-  output = common.RunAndCheckOutput(cmd, verbose=False)
-  return int(output)
-
-
-def GetVerityMetadataSize(image_size):
-  cmd = ["build_verity_metadata", "size", str(image_size)]
-  output = common.RunAndCheckOutput(cmd, verbose=False)
-  return int(output)
-
-
-def GetVeritySize(image_size, fec_supported):
-  verity_tree_size = GetVerityTreeSize(image_size)
-  verity_metadata_size = GetVerityMetadataSize(image_size)
-  verity_size = verity_tree_size + verity_metadata_size
-  if fec_supported:
-    fec_size = GetVerityFECSize(image_size + verity_size)
-    return verity_size + fec_size
-  return verity_size
-
-
-def GetSimgSize(image_file):
-  simg = sparse_img.SparseImage(image_file, build_map=False)
-  return simg.blocksize * simg.total_blocks
-
-
-def ZeroPadSimg(image_file, pad_size):
-  blocks = pad_size // BLOCK_SIZE
-  logger.info("Padding %d blocks (%d bytes)", blocks, pad_size)
-  simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
-  simg.AppendFillChunk(0, blocks)
-
-
-def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path,
-                   padding_size):
-  cmd = ["fec", "-e", "-p", str(padding_size), sparse_image_path,
-         verity_path, verity_fec_path]
-  common.RunAndCheckOutput(cmd)
-
-
-def BuildVerityTree(sparse_image_path, verity_image_path):
-  cmd = ["build_verity_tree", "-A", FIXED_SALT, sparse_image_path,
-         verity_image_path]
-  output = common.RunAndCheckOutput(cmd)
-  root, salt = output.split()
-  return root, salt
-
-
-def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
-                        block_device, signer_path, key, signer_args,
-                        verity_disable):
-  cmd = ["build_verity_metadata", "build", str(image_size),
-         verity_metadata_path, root_hash, salt, block_device, signer_path, key]
-  if signer_args:
-    cmd.append("--signer_args=\"%s\"" % (' '.join(signer_args),))
-  if verity_disable:
-    cmd.append("--verity_disable")
-  common.RunAndCheckOutput(cmd)
-
-
-def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
-  """Appends the unsparse image to the given sparse image.
-
-  Args:
-    sparse_image_path: the path to the (sparse) image
-    unsparse_image_path: the path to the (unsparse) image
-
-  Raises:
-    BuildVerityImageError: On error.
-  """
-  cmd = ["append2simg", sparse_image_path, unsparse_image_path]
-  try:
-    common.RunAndCheckOutput(cmd)
-  except:
-    logger.exception(error_message)
-    raise BuildVerityImageError(error_message)
-
-
-def Append(target, file_to_append, error_message):
-  """Appends file_to_append to target.
-
-  Raises:
-    BuildVerityImageError: On error.
-  """
-  try:
-    with open(target, 'ab') as out_file, \
-        open(file_to_append, 'rb') as input_file:
-      for line in input_file:
-        out_file.write(line)
-  except IOError:
-    logger.exception(error_message)
-    raise BuildVerityImageError(error_message)
-
-
 def CreateVerityImageBuilder(prop_dict):
   """Returns a verity image builder based on the given build properties.
 
@@ -166,23 +65,6 @@
   if partition_size:
     partition_size = int(partition_size)
 
-  # Verified Boot 1.0
-  verity_supported = prop_dict.get("verity") == "true"
-  is_verity_partition = "verity_block_device" in prop_dict
-  if verity_supported and is_verity_partition:
-    if OPTIONS.verity_signer_path is not None:
-      signer_path = OPTIONS.verity_signer_path
-    else:
-      signer_path = prop_dict["verity_signer_cmd"]
-    return Version1VerityImageBuilder(
-        partition_size,
-        prop_dict["verity_block_device"],
-        prop_dict.get("verity_fec") == "true",
-        signer_path,
-        prop_dict["verity_key"] + ".pk8",
-        OPTIONS.verity_signer_args,
-        "verity_disable" in prop_dict)
-
   # Verified Boot 2.0
   if (prop_dict.get("avb_hash_enable") == "true" or
       prop_dict.get("avb_hashtree_enable") == "true"):
@@ -245,125 +127,6 @@
     raise NotImplementedError
 
 
-class Version1VerityImageBuilder(VerityImageBuilder):
-  """A VerityImageBuilder for Verified Boot 1.0."""
-
-  def __init__(self, partition_size, block_dev, fec_supported, signer_path,
-               signer_key, signer_args, verity_disable):
-    self.version = 1
-    self.partition_size = partition_size
-    self.block_device = block_dev
-    self.fec_supported = fec_supported
-    self.signer_path = signer_path
-    self.signer_key = signer_key
-    self.signer_args = signer_args
-    self.verity_disable = verity_disable
-    self.image_size = None
-    self.verity_size = None
-
-  def CalculateDynamicPartitionSize(self, image_size):
-    # This needs to be implemented. Note that returning the given image size as
-    # the partition size doesn't make sense, as it will fail later.
-    raise NotImplementedError
-
-  def CalculateMaxImageSize(self, partition_size=None):
-    """Calculates the max image size by accounting for the verity metadata.
-
-    Args:
-      partition_size: The partition size, which defaults to self.partition_size
-          if unspecified.
-
-    Returns:
-      The size of the image adjusted for verity metadata.
-    """
-    if partition_size is None:
-      partition_size = self.partition_size
-    assert partition_size > 0, \
-        "Invalid partition size: {}".format(partition_size)
-
-    hi = partition_size
-    if hi % BLOCK_SIZE != 0:
-      hi = (hi // BLOCK_SIZE) * BLOCK_SIZE
-
-    # verity tree and fec sizes depend on the partition size, which
-    # means this estimate is always going to be unnecessarily small
-    verity_size = GetVeritySize(hi, self.fec_supported)
-    lo = partition_size - verity_size
-    result = lo
-
-    # do a binary search for the optimal size
-    while lo < hi:
-      i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
-      v = GetVeritySize(i, self.fec_supported)
-      if i + v <= partition_size:
-        if result < i:
-          result = i
-          verity_size = v
-        lo = i + BLOCK_SIZE
-      else:
-        hi = i
-
-    self.image_size = result
-    self.verity_size = verity_size
-
-    logger.info(
-        "Calculated image size for verity: partition_size %d, image_size %d, "
-        "verity_size %d", partition_size, result, verity_size)
-    return result
-
-  def Build(self, out_file):
-    """Creates an image that is verifiable using dm-verity.
-
-    Args:
-      out_file: the output image.
-
-    Returns:
-      AssertionError: On invalid partition sizes.
-      BuildVerityImageError: On other errors.
-    """
-    image_size = int(self.image_size)
-    tempdir_name = common.MakeTempDir(suffix="_verity_images")
-
-    # Get partial image paths.
-    verity_image_path = os.path.join(tempdir_name, "verity.img")
-    verity_metadata_path = os.path.join(tempdir_name, "verity_metadata.img")
-
-    # Build the verity tree and get the root hash and salt.
-    root_hash, salt = BuildVerityTree(out_file, verity_image_path)
-
-    # Build the metadata blocks.
-    BuildVerityMetadata(
-        image_size, verity_metadata_path, root_hash, salt, self.block_device,
-        self.signer_path, self.signer_key, self.signer_args,
-        self.verity_disable)
-
-    padding_size = self.partition_size - self.image_size - self.verity_size
-    assert padding_size >= 0
-
-    # Build the full verified image.
-    Append(
-        verity_image_path, verity_metadata_path,
-        "Failed to append verity metadata")
-
-    if self.fec_supported:
-      # Build FEC for the entire partition, including metadata.
-      verity_fec_path = os.path.join(tempdir_name, "verity_fec.img")
-      BuildVerityFEC(
-          out_file, verity_image_path, verity_fec_path, padding_size)
-      Append(verity_image_path, verity_fec_path, "Failed to append FEC")
-
-    Append2Simg(
-        out_file, verity_image_path, "Failed to append verity data")
-
-  def PadSparseImage(self, out_file):
-    sparse_image_size = GetSimgSize(out_file)
-    if sparse_image_size > self.image_size:
-      raise BuildVerityImageError(
-          "Error: image size of {} is larger than partition size of "
-          "{}".format(sparse_image_size, self.image_size))
-    ZeroPadSimg(out_file, self.image_size - sparse_image_size)
-
-
 class VerifiedBootVersion2VerityImageBuilder(VerityImageBuilder):
   """A VerityImageBuilder for Verified Boot 2.0."""
 
@@ -519,199 +282,6 @@
       raise BuildVerityImageError("Failed to add AVB footer: {}".format(output))
 
 
-class HashtreeInfoGenerationError(Exception):
-  """An Exception raised during hashtree info generation."""
-
-  def __init__(self, message):
-    Exception.__init__(self, message)
-
-
-class HashtreeInfo(object):
-  def __init__(self):
-    self.hashtree_range = None
-    self.filesystem_range = None
-    self.hash_algorithm = None
-    self.salt = None
-    self.root_hash = None
-
-
-def CreateHashtreeInfoGenerator(partition_name, block_size, info_dict):
-  generator = None
-  if (info_dict.get("verity") == "true" and
-      info_dict.get("{}_verity_block_device".format(partition_name))):
-    partition_size = info_dict["{}_size".format(partition_name)]
-    fec_supported = info_dict.get("verity_fec") == "true"
-    generator = VerifiedBootVersion1HashtreeInfoGenerator(
-        partition_size, block_size, fec_supported)
-
-  return generator
-
-
-class HashtreeInfoGenerator(object):
-  def Generate(self, image):
-    raise NotImplementedError
-
-  def DecomposeSparseImage(self, image):
-    raise NotImplementedError
-
-  def ValidateHashtree(self):
-    raise NotImplementedError
-
-
-class VerifiedBootVersion1HashtreeInfoGenerator(HashtreeInfoGenerator):
-  """A class that parses the metadata of hashtree for a given partition."""
-
-  def __init__(self, partition_size, block_size, fec_supported):
-    """Initialize VerityTreeInfo with the sparse image and input property.
-
-    Arguments:
-      partition_size: The whole size in bytes of a partition, including the
-          filesystem size, padding size, and verity size.
-      block_size: Expected size in bytes of each block for the sparse image.
-      fec_supported: True if the verity section contains fec data.
-    """
-
-    self.block_size = block_size
-    self.partition_size = partition_size
-    self.fec_supported = fec_supported
-
-    self.image = None
-    self.filesystem_size = None
-    self.hashtree_size = None
-    self.metadata_size = None
-
-    prop_dict = {
-        'partition_size': str(partition_size),
-        'verity': 'true',
-        'verity_fec': 'true' if fec_supported else None,
-        # 'verity_block_device' needs to be present to indicate a verity-enabled
-        # partition.
-        'verity_block_device': '',
-        # We don't need the following properties that are needed for signing the
-        # verity metadata.
-        'verity_key': '',
-        'verity_signer_cmd': None,
-    }
-    self.verity_image_builder = CreateVerityImageBuilder(prop_dict)
-
-    self.hashtree_info = HashtreeInfo()
-
-  def DecomposeSparseImage(self, image):
-    """Calculate the verity size based on the size of the input image.
-
-    Since we already know the structure of a verity enabled image to be:
-    [filesystem, verity_hashtree, verity_metadata, fec_data]. We can then
-    calculate the size and offset of each section.
-    """
-
-    self.image = image
-    assert self.block_size == image.blocksize
-    assert self.partition_size == image.total_blocks * self.block_size, \
-        "partition size {} doesn't match with the calculated image size." \
-        " total_blocks: {}".format(self.partition_size, image.total_blocks)
-
-    adjusted_size = self.verity_image_builder.CalculateMaxImageSize()
-    assert adjusted_size % self.block_size == 0
-
-    verity_tree_size = GetVerityTreeSize(adjusted_size)
-    assert verity_tree_size % self.block_size == 0
-
-    metadata_size = GetVerityMetadataSize(adjusted_size)
-    assert metadata_size % self.block_size == 0
-
-    self.filesystem_size = adjusted_size
-    self.hashtree_size = verity_tree_size
-    self.metadata_size = metadata_size
-
-    self.hashtree_info.filesystem_range = RangeSet(
-        data=[0, adjusted_size // self.block_size])
-    self.hashtree_info.hashtree_range = RangeSet(
-        data=[adjusted_size // self.block_size,
-              (adjusted_size + verity_tree_size) // self.block_size])
-
-  def _ParseHashtreeMetadata(self):
-    """Parses the hash_algorithm, root_hash, salt from the metadata block."""
-
-    metadata_start = self.filesystem_size + self.hashtree_size
-    metadata_range = RangeSet(
-        data=[metadata_start // self.block_size,
-              (metadata_start + self.metadata_size) // self.block_size])
-    meta_data = b''.join(self.image.ReadRangeSet(metadata_range))
-
-    # More info about the metadata structure available in:
-    # system/extras/verity/build_verity_metadata.py
-    META_HEADER_SIZE = 268
-    header_bin = meta_data[0:META_HEADER_SIZE]
-    header = struct.unpack("II256sI", header_bin)
-
-    # header: magic_number, version, signature, table_len
-    assert header[0] == 0xb001b001, header[0]
-    table_len = header[3]
-    verity_table = meta_data[META_HEADER_SIZE: META_HEADER_SIZE + table_len]
-    table_entries = verity_table.rstrip().split()
-
-    # Expected verity table format: "1 block_device block_device block_size
-    # block_size data_blocks data_blocks hash_algorithm root_hash salt"
-    assert len(table_entries) == 10, "Unexpected verity table size {}".format(
-        len(table_entries))
-    assert (int(table_entries[3]) == self.block_size and
-            int(table_entries[4]) == self.block_size)
-    assert (int(table_entries[5]) * self.block_size == self.filesystem_size and
-            int(table_entries[6]) * self.block_size == self.filesystem_size)
-
-    self.hashtree_info.hash_algorithm = table_entries[7].decode()
-    self.hashtree_info.root_hash = table_entries[8].decode()
-    self.hashtree_info.salt = table_entries[9].decode()
-
-  def ValidateHashtree(self):
-    """Checks that we can reconstruct the verity hash tree."""
-
-    # Writes the filesystem section to a temp file; and calls the executable
-    # build_verity_tree to construct the hash tree.
-    adjusted_partition = common.MakeTempFile(prefix="adjusted_partition")
-    with open(adjusted_partition, "wb") as fd:
-      self.image.WriteRangeDataToFd(self.hashtree_info.filesystem_range, fd)
-
-    generated_verity_tree = common.MakeTempFile(prefix="verity")
-    root_hash, salt = BuildVerityTree(adjusted_partition, generated_verity_tree)
-
-    # The salt should be always identical, as we use fixed value.
-    assert salt == self.hashtree_info.salt, \
-        "Calculated salt {} doesn't match the one in metadata {}".format(
-            salt, self.hashtree_info.salt)
-
-    if root_hash != self.hashtree_info.root_hash:
-      logger.warning(
-          "Calculated root hash %s doesn't match the one in metadata %s",
-          root_hash, self.hashtree_info.root_hash)
-      return False
-
-    # Reads the generated hash tree and checks if it has the exact same bytes
-    # as the one in the sparse image.
-    with open(generated_verity_tree, 'rb') as fd:
-      return fd.read() == b''.join(self.image.ReadRangeSet(
-          self.hashtree_info.hashtree_range))
-
-  def Generate(self, image):
-    """Parses and validates the hashtree info in a sparse image.
-
-    Returns:
-      hashtree_info: The information needed to reconstruct the hashtree.
-
-    Raises:
-      HashtreeInfoGenerationError: If we fail to generate the exact bytes of
-          the hashtree.
-    """
-
-    self.DecomposeSparseImage(image)
-    self._ParseHashtreeMetadata()
-
-    if not self.ValidateHashtree():
-      raise HashtreeInfoGenerationError("Failed to reconstruct the verity tree")
-
-    return self.hashtree_info
-
-
 def CreateCustomImageBuilder(info_dict, partition_name, partition_size,
                             key_path, algorithm, signing_args):
   builder = None
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index c127dbe..25c53d3 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -901,7 +901,7 @@
      * Tries to load a JSE Provider by class name. This is for custom PrivateKey
      * types that might be stored in PKCS#11-like storage.
      */
-    private static void loadProviderIfNecessary(String providerClassName) {
+    private static void loadProviderIfNecessary(String providerClassName, String providerArg) {
         if (providerClassName == null) {
             return;
         }
@@ -920,27 +920,41 @@
             return;
         }
 
-        Constructor<?> constructor = null;
-        for (Constructor<?> c : klass.getConstructors()) {
-            if (c.getParameterTypes().length == 0) {
-                constructor = c;
-                break;
+        Constructor<?> constructor;
+        Object o = null;
+        if (providerArg == null) {
+            try {
+                constructor = klass.getConstructor();
+                o = constructor.newInstance();
+            } catch (ReflectiveOperationException e) {
+                e.printStackTrace();
+                System.err.println("Unable to instantiate " + providerClassName
+                        + " with a zero-arg constructor");
+                System.exit(1);
+            }
+        } else {
+            try {
+                constructor = klass.getConstructor(String.class);
+                o = constructor.newInstance(providerArg);
+            } catch (ReflectiveOperationException e) {
+                // This is expected from JDK 9+; the single-arg constructor accepting the
+                // configuration has been replaced with a configure(String) method to be invoked
+                // after instantiating the Provider with the zero-arg constructor.
+                try {
+                    constructor = klass.getConstructor();
+                    o = constructor.newInstance();
+                    // The configure method will return either the modified Provider or a new
+                    // Provider if this one cannot be configured in-place.
+                    o = klass.getMethod("configure", String.class).invoke(o, providerArg);
+                } catch (ReflectiveOperationException roe) {
+                    roe.printStackTrace();
+                    System.err.println("Unable to instantiate " + providerClassName
+                            + " with the provided argument " + providerArg);
+                    System.exit(1);
+                }
             }
         }
-        if (constructor == null) {
-            System.err.println("No zero-arg constructor found for " + providerClassName);
-            System.exit(1);
-            return;
-        }
 
-        final Object o;
-        try {
-            o = constructor.newInstance();
-        } catch (Exception e) {
-            e.printStackTrace();
-            System.exit(1);
-            return;
-        }
         if (!(o instanceof Provider)) {
             System.err.println("Not a Provider class: " + providerClassName);
             System.exit(1);
@@ -1049,6 +1063,7 @@
                            "[-a <alignment>] " +
                            "[--align-file-size] " +
                            "[-providerClass <className>] " +
+                           "[-providerArg <configureArg>] " +
                            "[-loadPrivateKeysFromKeyStore <keyStoreName>]" +
                            "[-keyStorePin <pin>]" +
                            "[--min-sdk-version <n>] " +
@@ -1073,6 +1088,7 @@
 
         boolean signWholeFile = false;
         String providerClass = null;
+        String providerArg = null;
         String keyStoreName = null;
         String keyStorePin = null;
         int alignment = 4;
@@ -1081,6 +1097,7 @@
         boolean signUsingApkSignatureSchemeV2 = true;
         boolean signUsingApkSignatureSchemeV4 = false;
         SigningCertificateLineage certLineage = null;
+        Integer rotationMinSdkVersion = null;
 
         int argstart = 0;
         while (argstart < args.length && args[argstart].startsWith("-")) {
@@ -1093,6 +1110,12 @@
                 }
                 providerClass = args[++argstart];
                 ++argstart;
+            } else if("-providerArg".equals(args[argstart])) {
+                if (argstart + 1 >= args.length) {
+                    usage();
+                }
+                providerArg = args[++argstart];
+                ++argstart;
             } else if ("-loadPrivateKeysFromKeyStore".equals(args[argstart])) {
                 if (argstart + 1 >= args.length) {
                     usage();
@@ -1135,6 +1158,15 @@
                             "Error reading lineage file: " + e.getMessage());
                 }
                 ++argstart;
+            } else if ("--rotation-min-sdk-version".equals(args[argstart])) {
+                String rotationMinSdkVersionString = args[++argstart];
+                try {
+                    rotationMinSdkVersion = Integer.parseInt(rotationMinSdkVersionString);
+                } catch (NumberFormatException e) {
+                    throw new IllegalArgumentException(
+                            "--rotation-min-sdk-version must be a decimal number: " + rotationMinSdkVersionString);
+                }
+                ++argstart;
             } else {
                 usage();
             }
@@ -1153,7 +1185,7 @@
             System.exit(2);
         }
 
-        loadProviderIfNecessary(providerClass);
+        loadProviderIfNecessary(providerClass, providerArg);
 
         String inputFilename = args[numArgsExcludeV4FilePath - 2];
         String outputFilename = args[numArgsExcludeV4FilePath - 1];
@@ -1226,15 +1258,22 @@
                     }
                 }
 
-                try (ApkSignerEngine apkSigner =
-                        new DefaultApkSignerEngine.Builder(
-                                createSignerConfigs(privateKey, publicKey), minSdkVersion)
-                                .setV1SigningEnabled(true)
-                                .setV2SigningEnabled(signUsingApkSignatureSchemeV2)
-                                .setOtherSignersSignaturesPreserved(false)
-                                .setCreatedBy("1.0 (Android SignApk)")
-                                .setSigningCertificateLineage(certLineage)
-                                .build()) {
+                DefaultApkSignerEngine.Builder builder = new DefaultApkSignerEngine.Builder(
+                    createSignerConfigs(privateKey, publicKey), minSdkVersion)
+                    .setV1SigningEnabled(true)
+                    .setV2SigningEnabled(signUsingApkSignatureSchemeV2)
+                    .setOtherSignersSignaturesPreserved(false)
+                    .setCreatedBy("1.0 (Android SignApk)");
+
+                if (certLineage != null) {
+                    builder = builder.setSigningCertificateLineage(certLineage);
+                }
+
+                if (rotationMinSdkVersion != null) {
+                    builder = builder.setMinSdkVersionForRotation(rotationMinSdkVersion);
+                }
+
+                try (ApkSignerEngine apkSigner = builder.build()) {
                     // We don't preserve the input APK's APK Signing Block (which contains v2
                     // signatures)
                     apkSigner.inputApkSigningBlock(null);
diff --git a/tools/soong_to_convert.py b/tools/soong_to_convert.py
index 949131b..649829f 100755
--- a/tools/soong_to_convert.py
+++ b/tools/soong_to_convert.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2016 The Android Open Source Project
 #
@@ -50,9 +50,6 @@
 Not all problems can be discovered, but this is a starting point.
 
 """
-
-from __future__ import print_function
-
 import csv
 import sys
 
@@ -113,7 +110,7 @@
 
 def main(filename):
     """Read the CSV file, print the results"""
-    with open(filename, 'rb') as csvfile:
+    with open(filename, 'r') as csvfile:
         results = process(csv.reader(csvfile))
 
     native_results = filter(results, "native")
diff --git a/tools/stub_diff_analyzer.py b/tools/stub_diff_analyzer.py
new file mode 100644
index 0000000..e49d092
--- /dev/null
+++ b/tools/stub_diff_analyzer.py
@@ -0,0 +1,328 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from sys import exit
+from typing import List
+from glob import glob
+from pathlib import Path
+from collections import defaultdict
+from difflib import Differ
+from re import split
+from tqdm import tqdm
+import argparse
+
+
+DIFFER_CODE_LEN = 2
+
+class DifferCodes:
+    COMMON = '  '
+    UNIQUE_FIRST = '- '
+    UNIQUE_SECOND = '+ '
+    DIFF_IDENT = '? '
+
+class FilesDiffAnalyzer:
+    def __init__(self, args) -> None:
+        self.out_dir = args.out_dir
+        self.show_diff = args.show_diff
+        self.skip_words = args.skip_words
+        self.first_dir = args.first_dir
+        self.second_dir = args.second_dir
+        self.include_common = args.include_common
+
+        self.first_dir_files = self.get_files(self.first_dir)
+        self.second_dir_files = self.get_files(self.second_dir)
+        self.common_file_map = defaultdict(set)
+
+        self.map_common_files(self.first_dir_files, self.first_dir)
+        self.map_common_files(self.second_dir_files, self.second_dir)
+
+    def get_files(self, dir: str) -> List[str]:
+        """Get all files directory in the input directory including the files in the subdirectories
+
+        Recursively finds all files in the input directory.
+        Returns a list of file directory strings, which do not include directories but only files.
+        List is sorted in alphabetical order of the file directories.
+
+        Args:
+            dir: Directory to get the files. String.
+
+        Returns:
+            A list of file directory strings within the input directory.
+            Sorted in Alphabetical order.
+
+        Raises:
+            FileNotFoundError: An error occurred accessing the non-existing directory
+        """
+
+        if not dir_exists(dir):
+            raise FileNotFoundError("Directory does not exist")
+
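+        # Normalize the input to a recursive glob pattern,
+        # e.g. "foo" or "foo/" becomes "foo/**".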
+        if dir[:-2] != "**":
+            if dir[:-1] != "/":
+                dir += "/"
+            dir += "**"
+
+        return [file for file in sorted(glob(dir, recursive=True)) if Path(file).is_file()]
+
+    def map_common_files(self, files: List[str], dir: str) -> None:
+        for file in files:
+            file_name = file.split(dir, 1)[-1]
+            self.common_file_map[file_name].add(dir)
+        return
+
+    def compare_file_contents(self, first_file: str, second_file: str) -> List[str]:
+        """Compare the contents of the files and return different lines
+
+        Given two file directory strings, compare the contents of the two files
+        and return the list of file contents string prepended with unique identifier codes.
+        The identifier codes include:
+        - '  '(two empty space characters): Line common to two files
+        - '- '(minus followed by a space) : Line unique to first file
+        - '+ '(plus followed by a space)  : Line unique to second file
+
+        Args:
+            first_file: First file directory string to compare the content
+            second_file: Second file directory string to compare the content
+
+        Returns:
+            A list of the file content strings. For example:
+
+            [
+                "  Foo",
+                "- Bar",
+                "+ Baz"
+            ]
+        """
+
+        d = Differ()
+        first_file_contents = sort_methods(get_file_contents(first_file))
+        second_file_contents = sort_methods(get_file_contents(second_file))
+        diff = list(d.compare(first_file_contents, second_file_contents))
+        ret = [f"diff {first_file} {second_file}"]
+
+        idx = 0
+        while idx < len(diff):
+            line = diff[idx]
+            line_code = line[:DIFFER_CODE_LEN]
+
+            match line_code:
+                case DifferCodes.COMMON:
+                    if self.include_common:
+                        ret.append(line)
+
+                case DifferCodes.UNIQUE_FIRST:
+                    # The next line may pair with this one; compare them word by word.
+                    if (idx < len(diff) - 1 and
+                        (next_line_code := diff[idx + 1][:DIFFER_CODE_LEN])
+                        not in (DifferCodes.UNIQUE_FIRST, DifferCodes.COMMON)):
+                        delta = 1 if next_line_code == DifferCodes.UNIQUE_SECOND else 2
+                        line_to_compare = diff[idx + delta]
+                        if self.lines_differ(line, line_to_compare):
+                            ret.extend([line, line_to_compare])
+                        else:
+                            if self.include_common:
+                                ret.append(DifferCodes.COMMON +
+                                           line[DIFFER_CODE_LEN:])
+                        idx += delta
+                    else:
+                        ret.append(line)
+
+                case DifferCodes.UNIQUE_SECOND:
+                    ret.append(line)
+
+                case DifferCodes.DIFF_IDENT:
+                    pass
+            idx += 1
+        return ret
+
+    def lines_differ(self, line1: str, line2: str) -> bool:
+        """Check if the input lines are different or not
+
+        Compare the two lines word by word and check if the two lines are different or not.
+        If the different words in the comparing lines are included in skip_words,
+        the lines are not considered different.
+
+        Args:
+            line1:      first line to compare
+            line2:      second line to compare
+
+        Returns:
+            Boolean value indicating whether the two lines differ
+
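+        For example, with skip_words=["Stub"], the words "Stub1" and "Stub2"
+        both contain "Stub" as a substring, so lines differing only in those
+        words are not considered different.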
+        """
+        # Split by '.' or whitespace.
+        def split_words(line: str) -> List[str]:
+            return split('\\s|\\.', line[DIFFER_CODE_LEN:])
+
+        line1_words, line2_words = split_words(line1), split_words(line2)
+        if len(line1_words) != len(line2_words):
+            return True
+
+        for word1, word2 in zip(line1_words, line2_words):
+            if word1 != word2:
+                # Do not check whether the words equal a skip word;
+                # check whether they contain a skip word as a substring.
+                if all(sw not in word1 and sw not in word2 for sw in self.skip_words):
+                    return True
+
+        return False
+
+    def analyze(self) -> None:
+        """Analyze file contents in both directories and write to output or console.
+        """
+        for file in tqdm(sorted(self.common_file_map.keys())):
+            val = self.common_file_map[file]
+
+            # When file exists in both directories
+            lines = list()
+            if val == set([self.first_dir, self.second_dir]):
+                lines = self.compare_file_contents(
+                    self.first_dir + file, self.second_dir + file)
+            else:
+                existing_dir, not_existing_dir = (
+                    (self.first_dir, self.second_dir) if self.first_dir in val
+                    else (self.second_dir, self.first_dir))
+
+                lines = [f"{not_existing_dir}{file} does not exist."]
+
+                if self.show_diff:
+                    lines.append(f"Content of {existing_dir}{file}: \n")
+                    lines.extend(get_file_contents(existing_dir + file))
+
+            self.write(lines)
+
+    def write(self, lines: List[str]) -> None:
+        if self.out_dir == "":
+            pprint(lines)
+        else:
+            write_lines(self.out_dir, lines)
+
+###
+# Helper functions
+###
+
+def sort_methods(lines: List[str]) -> List[str]:
+    """Sort class methods in the file contents by alphabetical order
+
+    Given lines of Java file contents, return lines with class methods sorted in alphabetical order.
+    Also omit empty lines or lines with spaces.
+    For example:
+        l = [
+            "package android.test;",
+            "",
+            "public static final int ORANGE = 1;",
+            "",
+            "public class TestClass {",
+            "public TestClass() { throw new RuntimeException("Stub!"); }",
+            "public void foo() { throw new RuntimeException("Stub!"); }",
+            "public void bar() { throw new RuntimeException("Stub!"); }",
+            "}"
+        ]
+        sort_methods(l) returns
+        [
+            "package android.test;",
+            "public static final int ORANGE = 1;",
+            "public class TestClass {",
+            "public TestClass() { throw new RuntimeException("Stub!"); }",
+            "public void bar() { throw new RuntimeException("Stub!"); }",
+            "public void foo() { throw new RuntimeException("Stub!"); }",
+            "}"
+        ]
+
+    Args:
+        lines: List of strings consisting of the Java file contents.
+
+    Returns:
+        A list of string with sorted class methods.
+
+    """
+    def is_not_blank(l: str) -> bool:
+        return bool(l) and not l.isspace()
+
+    ret = list()
+
+    in_class = False
+    buffer = list()
+    for line in lines:
+        if not in_class:
+            if "class" in line:
+                in_class = True
+                ret.append(line)
+            else:
+                # Adding static variables, package info, etc.
+                # Skipping empty or space lines.
+                if is_not_blank(line):
+                    ret.append(line)
+        else:
+            # End of class
+            if line and line[0] == "}":
+                in_class = False
+                ret.extend(sorted(buffer))
+                buffer = list()
+                ret.append(line)
+            else:
+                if is_not_blank(line):
+                    buffer.append(line)
+
+    return ret
+
+def get_file_contents(file_path: str) -> List[str]:
+    # The with statement closes the file; no explicit close() is needed.
+    with open(file_path) as f:
+        return [line.rstrip('\n') for line in f]
+
+def pprint(l: List[str]) -> None:
+    for line in l:
+        print(line)
+
+def write_lines(out_dir: str, lines: List[str]) -> None:
+    with open(out_dir, "a") as f:
+        f.writelines(line + '\n' for line in lines)
+        f.write("\n")
+
+def dir_exists(dir: str) -> bool:
+    return Path(dir).exists()
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('first_dir', action='store', type=str,
+                        help="first path to compare file directory and contents")
+    parser.add_argument('second_dir', action='store', type=str,
+                        help="second path to compare file directory and contents")
+    parser.add_argument('--out', dest='out_dir',
+                        action='store', default="", type=str,
+                        help="optional directory to write log. If not set, will print to console")
+    parser.add_argument('--show-diff-file', dest='show_diff',
+                        action=argparse.BooleanOptionalAction,
+                        help="optional flag. If passed, will print out the content of the file unique to each directories")
+    parser.add_argument('--include-common', dest='include_common',
+                        action=argparse.BooleanOptionalAction,
+                        help="optional flag. If passed, will print out the contents common to both files as well,\
+                            instead of printing only diff lines.")
+    parser.add_argument('--skip-words', nargs='+',
+                        dest='skip_words', default=[], help="optional words to skip in comparison")
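+    # Example invocation (hypothetical paths):
+    #   stub_diff_analyzer.py out/stubs_a/ out/stubs_b/ --skip-words Stub --out diff.log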
+
+    args = parser.parse_args()
+
+    if not args.first_dir or not args.second_dir:
+        parser.print_usage()
+        exit(0)
+
+    analyzer = FilesDiffAnalyzer(args)
+    analyzer.analyze()
diff --git a/tools/warn/html_writer.py b/tools/warn/html_writer.py
index 3fa822a..46ba253 100644
--- a/tools/warn/html_writer.py
+++ b/tools/warn/html_writer.py
@@ -56,6 +56,7 @@
 
 from __future__ import print_function
 import csv
+import datetime
 import html
 import sys
 
@@ -258,7 +259,7 @@
 
 
 def dump_stats(writer, warn_patterns):
-  """Dump some stats about total number of warnings and such."""
+  """Dump some stats about total number of warnings and date."""
 
   known = 0
   skipped = 0
@@ -279,6 +280,8 @@
   if total < 1000:
     extra_msg = ' (low count may indicate incremental build)'
   writer('Total number of warnings: <b>' + str(total) + '</b>' + extra_msg)
+  date_time_str = datetime.datetime.now().strftime('%Y/%m/%d %H:%M:%S')
+  writer('<p>(generated on ' + date_time_str + ')')
 
 
 # New base table of warnings, [severity, warn_id, project, warning_message]
@@ -662,15 +665,26 @@
   var warningsOfFiles = {};
   var warningsOfDirs = {};
   var subDirs = {};
-  function addOneWarning(map, key) {
-    map[key] = 1 + ((key in map) ? map[key] : 0);
+  function addOneWarning(map, key, type, unique) {
+    function increaseCounter(idx) {
+      map[idx] = 1 + ((idx in map) ? map[idx] : 0);
+    }
+    increaseCounter(key)
+    if (type != "") {
+      increaseCounter(type + " " + key)
+      if (unique) {
+        increaseCounter(type + " *")
+      }
+    }
   }
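+  // For example, a message "art/foo.c:12:3: warning: ... [-Wformat]" increments
+  // the counters for "art/foo.c", "[-Wformat] art/foo.c", and "[-Wformat] *".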
   for (var i = 0; i < numWarnings; i++) {
-    var file = WarningMessages[i].replace(/:.*/, "");
-    addOneWarning(warningsOfFiles, file);
+    var message = WarningMessages[i];
+    var file = message.replace(/:.*/, "");
+    var warningType = message.endsWith("]") ? message.replace(/.*\[/, "[") : "";
+    addOneWarning(warningsOfFiles, file, warningType, true);
     var dirs = file.split("/");
     var dir = dirs[0];
-    addOneWarning(warningsOfDirs, dir);
+    addOneWarning(warningsOfDirs, dir, warningType, true);
     for (var d = 1; d < dirs.length - 1; d++) {
       var subDir = dir + "/" + dirs[d];
       if (!(dir in subDirs)) {
@@ -678,7 +692,7 @@
       }
       subDirs[dir][subDir] = 1;
       dir = subDir;
-      addOneWarning(warningsOfDirs, dir);
+      addOneWarning(warningsOfDirs, dir, warningType, false);
     }
   }
   var minDirWarnings = numWarnings*(LimitPercentWarnings/100);
@@ -725,27 +739,33 @@
         document.getElementById(divName));
     table.draw(view, {allowHtml: true, alternatingRowStyle: true});
   }
-  addTable("Directory", "top_dirs_table", TopDirs, "selectDir");
-  addTable("File", "top_files_table", TopFiles, "selectFile");
+  addTable("[Warning Type] Directory", "top_dirs_table", TopDirs, "selectDir");
+  addTable("[Warning Type] File", "top_files_table", TopFiles, "selectFile");
 }
 function selectDirFile(idx, rows, dirFile) {
   if (rows.length <= idx) {
     return;
   }
   var name = rows[idx][2];
+  var type = "";
+  if (name.startsWith("[")) {
+    type = " " + name.replace(/ .*/, "");
+    name = name.replace(/.* /, "");
+  }
   var spanName = "selected_" + dirFile + "_name";
-  document.getElementById(spanName).innerHTML = name;
+  document.getElementById(spanName).innerHTML = name + type;
   var divName = "selected_" + dirFile + "_warnings";
   var numWarnings = rows[idx][1].v;
   var prefix = name.replace(/\\.\\.\\.$/, "");
   var data = new google.visualization.DataTable();
-  data.addColumn('string', numWarnings + ' warnings in ' + name);
+  data.addColumn('string', numWarnings + type + ' warnings in ' + name);
   var getWarningMessage = (FlagPlatform == "chrome")
         ? ((x) => addURLToLine(WarningMessages[Warnings[x][2]],
                                WarningLinks[Warnings[x][3]]))
         : ((x) => addURL(WarningMessages[Warnings[x][2]]));
   for (var i = 0; i < Warnings.length; i++) {
-    if (WarningMessages[Warnings[i][2]].startsWith(prefix)) {
+    if ((prefix.startsWith("*") || WarningMessages[Warnings[i][2]].startsWith(prefix)) &&
+        (type == "" || WarningMessages[Warnings[i][2]].endsWith(type))) {
       data.addRow([getWarningMessage(i)]);
     }
   }
@@ -827,14 +847,14 @@
   def section2():
     dump_dir_file_section(
         writer, 'directory', 'top_dirs_table',
-        'Directories with at least ' +
-        str(LIMIT_PERCENT_WARNINGS) + '% warnings')
+        'Directories/Warnings with at least ' +
+        str(LIMIT_PERCENT_WARNINGS) + '% of all cases')
   def section3():
     dump_dir_file_section(
         writer, 'file', 'top_files_table',
-        'Files with at least ' +
-        str(LIMIT_PERCENT_WARNINGS) + '% or ' +
-        str(LIMIT_WARNINGS_PER_FILE) + ' warnings')
+        'Files/Warnings with at least ' +
+        str(LIMIT_PERCENT_WARNINGS) + '% of all or ' +
+        str(LIMIT_WARNINGS_PER_FILE) + ' cases')
   def section4():
     writer('<script>')
     emit_js_data(writer, flags, warning_messages, warning_links,
diff --git a/tools/warn/warn_common.py b/tools/warn/warn_common.py
index 61c8676..aa68313 100755
--- a/tools/warn/warn_common.py
+++ b/tools/warn/warn_common.py
@@ -64,6 +64,10 @@
 from . import tidy_warn_patterns as tidy_patterns
 
 
+# Location of this file is used to guess the root of Android source tree.
+THIS_FILE_PATH = 'build/make/tools/warn/warn_common.py'
+
+
 def parse_args(use_google3):
   """Define and parse the args. Return the parse_args() result."""
   parser = argparse.ArgumentParser(
@@ -217,17 +221,27 @@
   return link
 
 
-def find_warn_py_and_android_root(path):
-  """Return android source root path if warn.py is found."""
+def find_this_file_and_android_root(path):
+  """Return android source root path if this file is found."""
   parts = path.split('/')
   for idx in reversed(range(2, len(parts))):
     root_path = '/'.join(parts[:idx])
     # Android root directory should contain this script.
-    if os.path.exists(root_path + '/build/make/tools/warn.py'):
+    if os.path.exists(root_path + '/' + THIS_FILE_PATH):
       return root_path
   return ''
 
 
+def find_android_root_top_dirs(root_dir):
+  """Return a list of directories under the root_dir, if it exists."""
+  # Root directory should contain at least build/make and build/soong.
+  if (not os.path.isdir(root_dir + '/build/make') or
+      not os.path.isdir(root_dir + '/build/soong')):
+    return None
+  return list(filter(lambda d: os.path.isdir(root_dir + '/' + d),
+                     os.listdir(root_dir)))
+
+
 def find_android_root(buildlog):
   """Guess android source root from common prefix of file paths."""
   # Use the longest common prefix of the absolute file paths
@@ -239,8 +253,8 @@
     # We want to find android_root of a local build machine.
     # Do not use RBE warning lines, which has '/b/f/w/' path prefix.
     # Do not use /tmp/ file warnings.
-    if warning_pattern.match(line) and (
-        '/b/f/w' not in line and not line.startswith('/tmp/')):
+    if ('/b/f/w' not in line and not line.startswith('/tmp/') and
+        warning_pattern.match(line)):
       warning_lines.append(line)
       count += 1
       if count > 9999:
@@ -249,17 +263,26 @@
       # the source tree root.
       if count < 100:
         path = os.path.normpath(re.sub(':.*$', '', line))
-        android_root = find_warn_py_and_android_root(path)
+        android_root = find_this_file_and_android_root(path)
         if android_root:
-          return android_root
+          return android_root, find_android_root_top_dirs(android_root)
   # Do not use common prefix of a small number of paths.
+  android_root = ''
   if count > 10:
     # pytype: disable=wrong-arg-types
     root_path = os.path.commonprefix(warning_lines)
     # pytype: enable=wrong-arg-types
     if len(root_path) > 2 and root_path[len(root_path) - 1] == '/':
-      return root_path[:-1]
-  return ''
+      android_root = root_path[:-1]
+  if android_root and os.path.isdir(android_root):
+    return android_root, find_android_root_top_dirs(android_root)
+  # When the build.log file is moved to a different machine where
+  # android_root is not found, use the location of this script
+  # to find the android source tree sub directories.
+  if __file__.endswith('/' + THIS_FILE_PATH):
+    script_root = __file__.replace('/' + THIS_FILE_PATH, '')
+    return android_root, find_android_root_top_dirs(script_root)
+  return android_root, None
 
 
 def remove_android_root_prefix(path, android_root):
@@ -310,8 +333,6 @@
   warning_pattern = re.compile(chrome_warning_pattern)
 
   # Collect all unique warning lines
-  # Remove the duplicated warnings save ~8% of time when parsing
-  # one typical build log than before
   unique_warnings = dict()
   for line in infile:
     if warning_pattern.match(line):
@@ -353,8 +374,7 @@
   target_product = 'unknown'
   target_variant = 'unknown'
   build_id = 'unknown'
-  use_rbe = False
-  android_root = find_android_root(infile)
+  android_root, root_top_dirs = find_android_root(infile)
   infile.seek(0)
 
   # rustc warning messages have two lines that should be combined:
@@ -367,24 +387,39 @@
   # C/C++ compiler warning messages have line and column numbers:
   #     some/path/file.c:line_number:column_number: warning: description
   warning_pattern = re.compile('(^[^ ]*/[^ ]*: warning: .*)|(^warning: .*)')
-  warning_without_file = re.compile('^warning: .*')
   rustc_file_position = re.compile('^[ ]+--> [^ ]*/[^ ]*:[0-9]+:[0-9]+')
 
-  # If RBE was used, try to reclaim some warning lines mixed with some
-  # leading chars from other concurrent job's stderr output .
+  # If RBE was used, try to reclaim some warning lines (from stdout)
+  # that contain leading characters from stderr.
   # The leading characters can be any character, including digits and spaces.
-  # It's impossible to correctly identify the starting point of the source
-  # file path without the file directory name knowledge.
-  # Here we can only be sure to recover lines containing "/b/f/w/".
-  rbe_warning_pattern = re.compile('.*/b/f/w/[^ ]*: warning: .*')
 
-   # Collect all unique warning lines
-  # Remove the duplicated warnings save ~8% of time when parsing
-  # one typical build log than before
+  # If a warning line's source file path contains the special RBE prefix
+  # /b/f/w/, we can remove all leading chars up to and including the "/b/f/w/".
+  bfw_warning_pattern = re.compile('.*/b/f/w/([^ ]*: warning: .*)')
+
+  # When android_root is known and available, we find its top directories
+  # and remove all leading chars before a top directory name.
+  # We assume that the leading chars from stderr do not contain "/".
+  # For example,
+  #   10external/...
+  #   12 warningsexternal/...
+  #   413 warningexternal/...
+  #   5 warnings generatedexternal/...
+  #   Suppressed 1000 warnings (packages/modules/...
+  if root_top_dirs:
+    extra_warning_pattern = re.compile(
+        '^.[^/]*((' + '|'.join(root_top_dirs) +
+        ')/[^ ]*: warning: .*)')
+  else:
+    extra_warning_pattern = re.compile('^[^/]* ([^ /]*/[^ ]*: warning: .*)')
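+    # For example, "12 warnings external/foo.c: warning: unused variable"
+    # is cleaned up to "external/foo.c: warning: unused variable".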
+
+  # Collect all unique warning lines
   unique_warnings = dict()
+  checked_warning_lines = dict()
   line_counter = 0
   prev_warning = ''
   for line in infile:
+    line_counter += 1
     if prev_warning:
       if rustc_file_position.match(line):
         # must be a rustc warning, combine 2 lines into one warning
@@ -399,14 +434,31 @@
           prev_warning, flags, android_root, unique_warnings)
       prev_warning = ''
 
-    if use_rbe and rbe_warning_pattern.match(line):
-      cleaned_up_line = re.sub('.*/b/f/w/', '', line)
-      unique_warnings = add_normalized_line_to_warnings(
-          cleaned_up_line, flags, android_root, unique_warnings)
+    # re.match is slow, with several warning line patterns and
+    # long input lines like "TIMEOUT: ...".
+    # We save significant time by skipping non-warning lines.
+    # But do not skip the first 100 lines, because we want to
+    # catch build variables.
+    if line_counter > 100 and line.find('warning: ') < 0:
       continue
 
+    # A large clean build output can contain up to 90% of duplicated
+    # "warning:" lines. If we can skip them quickly, we can
+    # speed up this for-loop 3X to 5X.
+    if line in checked_warning_lines:
+      continue
+    checked_warning_lines[line] = True
+
+    # Clean up extra prefix that could be introduced when RBE was used.
+    if '/b/f/w/' in line:
+      result = bfw_warning_pattern.search(line)
+    else:
+      result = extra_warning_pattern.search(line)
+    if result is not None:
+      line = result.group(1)
+
     if warning_pattern.match(line):
-      if warning_without_file.match(line):
+      if line.startswith('warning: '):
         # save this line and combine it with the next line
         prev_warning = line
       else:
@@ -416,7 +468,6 @@
 
     if line_counter < 100:
       # save a little bit of time by only doing this for the first few lines
-      line_counter += 1
       result = re.search('(?<=^PLATFORM_VERSION=).*', line)
       if result is not None:
         platform_version = result.group(0)
@@ -433,13 +484,6 @@
       if result is not None:
         build_id = result.group(0)
         continue
-      result = re.search('(?<=^TOP=).*', line)
-      if result is not None:
-        android_root = result.group(1)
-        continue
-      if re.search('USE_RBE=', line) is not None:
-        use_rbe = True
-        continue
 
   if android_root:
     new_unique_warnings = dict()
diff --git a/tools/whichgit b/tools/whichgit
new file mode 100755
index 0000000..b0bf2e4
--- /dev/null
+++ b/tools/whichgit
@@ -0,0 +1,110 @@
+#!/usr/bin/env python3
+
+import argparse
+import os
+import subprocess
+import sys
+
+def get_build_var(var):
+  return subprocess.run(["build/soong/soong_ui.bash","--dumpvar-mode", var],
+                        check=True, capture_output=True, text=True).stdout.strip()
+
+
+def get_sources(modules):
+  result = subprocess.run(["./prebuilts/build-tools/linux-x86/bin/ninja", "-f",
+                           "out/combined-" + os.environ["TARGET_PRODUCT"] + ".ninja",
+                           "-t", "inputs", "-d", ] + modules,
+                          stderr=subprocess.STDOUT, stdout=subprocess.PIPE, check=False, text=True)
+  if result.returncode != 0:
+    sys.stderr.write(result.stdout)
+    sys.exit(1)
+  return set([f for f in result.stdout.split("\n") if not f.startswith("out/")])
+
+
+def m_nothing():
+  result = subprocess.run(["build/soong/soong_ui.bash", "--build-mode", "--all-modules",
+                           "--dir=" + os.getcwd(), "nothing"],
+                           check=False, stderr=subprocess.STDOUT, stdout=subprocess.PIPE, text=True)
+  if result.returncode != 0:
+    sys.stderr.write(result.stdout)
+    sys.exit(1)
+
+
+def get_git_dirs():
+  text = subprocess.run(["repo","list"], check=True, capture_output=True, text=True).stdout
+  return [line.split(" : ")[0] + "/" for line in text.split("\n")]
+
+
+def get_referenced_projects(git_dirs, files):
+  # files must be sorted
+  referenced_dirs = set()
+  prev_dir = None
+  for f in files:
+    # Files are sorted, so consecutive files often share a project directory;
+    # reusing the previous match is ~5x faster for large sets of files.
+    if prev_dir:
+      if f.startswith(prev_dir):
+        referenced_dirs.add(prev_dir)
+        continue
+    for d in git_dirs:
+      if f.startswith(d):
+        referenced_dirs.add(d)
+        prev_dir = d
+        break
+  return [d[0:-1] for d in referenced_dirs]
+
+
+def main(argv):
+  # Argument parsing
+  ap = argparse.ArgumentParser(description="List the required git projects for the given modules")
+  ap.add_argument("--products", nargs="*",
+                  help="The TARGET_PRODUCT to check. If not provided just uses whatever has"
+                        + " already been built")
+  ap.add_argument("--variants", nargs="*",
+                  help="The TARGET_BUILD_VARIANTS to check. If not provided just uses whatever has"
+                        + " already been built, or eng if --products is supplied")
+  ap.add_argument("--modules", nargs="*",
+                  help="The build modules to check, or droid it not supplied")
+  ap.add_argument("--why", nargs="*",
+                  help="Also print the input files used in these projects, or \"*\" for all")
+  args = ap.parse_args(argv[1:])
+
+  modules = args.modules if args.modules else ["droid"]
+
+  # Get the list of sources for all of the requested build combos
+  if not args.products and not args.variants:
+    sources = get_sources(modules)
+  else:
+    if not args.products:
+      sys.stderr.write("Error: --products must be supplied if --variants is supplied")
+      sys.exit(1)
+    sources = set()
+    build_num = 1
+    for product in args.products:
+      os.environ["TARGET_PRODUCT"] = product
+      variants = args.variants if args.variants else ["user", "userdebug", "eng"]
+      for variant in variants:
+        sys.stderr.write(f"Analyzing build {build_num} of {len(args.products)*len(variants)}\r")
+        os.environ["TARGET_BUILD_VARIANT"] = variant
+        m_nothing()
+        sources.update(get_sources(modules))
+        build_num += 1
+    sys.stderr.write("\n\n")
+
+  sources = sorted(sources)
+
+  # Print the list of git directories that has one or more of the sources in it
+  for project in sorted(get_referenced_projects(get_git_dirs(), sources)):
+    print(project)
+    if args.why:
+      if "*" in args.why or project in args.why:
+        prefix = project + "/"
+        for f in sources:
+          if f.startswith(prefix):
+            print("  " + f)
+
+
+if __name__ == "__main__":
+  sys.exit(main(sys.argv))
+
+
+# vim: set ts=2 sw=2 sts=2 expandtab nocindent tw=100:
diff --git a/tools/zipalign/Android.bp b/tools/zipalign/Android.bp
index 8cab04c..0e1d58e 100644
--- a/tools/zipalign/Android.bp
+++ b/tools/zipalign/Android.bp
@@ -70,6 +70,7 @@
         "libgmock",
     ],
     data: [
+         "tests/data/archiveWithOneDirectoryEntry.zip",
          "tests/data/diffOrders.zip",
          "tests/data/holes.zip",
          "tests/data/unaligned.zip",
diff --git a/tools/zipalign/ZipAlign.cpp b/tools/zipalign/ZipAlign.cpp
index 08f67ff..23840e3 100644
--- a/tools/zipalign/ZipAlign.cpp
+++ b/tools/zipalign/ZipAlign.cpp
@@ -22,6 +22,19 @@
 
 namespace android {
 
+// An entry is considered a directory if it has a stored size of zero
+// and it ends with '/' or '\' character.
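+// For example, an entry named "assets/" (or "assets\") stored with an
+// uncompressed length of 0 is treated as a directory.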
+static bool isDirectory(ZipEntry* entry) {
+    if (entry->getUncompressedLen() != 0) {
+        return false;
+    }
+
+    const char* name = entry->getFileName();
+    size_t nameLength = strlen(name);
+    char lastChar = name[nameLength - 1];
+    return lastChar == '/' || lastChar == '\\';
+}
+
 static int getAlignment(bool pageAlignSharedLibs, int defaultAlignment,
     ZipEntry* pEntry) {
 
@@ -59,7 +72,7 @@
             return 1;
         }
 
-        if (pEntry->isCompressed()) {
+        if (pEntry->isCompressed() || isDirectory(pEntry)) {
             /* copy the entry without padding */
             //printf("--- %s: orig at %ld len=%ld (compressed)\n",
             //    pEntry->getFileName(), (long) pEntry->getFileOffset(),
@@ -160,7 +173,13 @@
                 printf("%8jd %s (OK - compressed)\n",
                     (intmax_t) pEntry->getFileOffset(), pEntry->getFileName());
             }
-        } else {
+        } else if (isDirectory(pEntry)) {
+            // Directory entries do not need to be aligned.
+            if (verbose)
+                printf("%8jd %s (OK - directory)\n",
+                       (intmax_t) pEntry->getFileOffset(), pEntry->getFileName());
+            continue;
+        } else {
             off_t offset = pEntry->getFileOffset();
             const int alignTo = getAlignment(pageAlignSharedLibs, alignment, pEntry);
             if ((offset % alignTo) != 0) {
diff --git a/tools/zipalign/tests/data/archiveWithOneDirectoryEntry.zip b/tools/zipalign/tests/data/archiveWithOneDirectoryEntry.zip
new file mode 100644
index 0000000..00be0ce
--- /dev/null
+++ b/tools/zipalign/tests/data/archiveWithOneDirectoryEntry.zip
Binary files differ
diff --git a/tools/zipalign/tests/src/align_test.cpp b/tools/zipalign/tests/src/align_test.cpp
index ff45187..a8433fa 100644
--- a/tools/zipalign/tests/src/align_test.cpp
+++ b/tools/zipalign/tests/src/align_test.cpp
@@ -12,6 +12,28 @@
 using namespace android;
 using namespace base;
 
+// This loads the whole file into memory, so be careful!
+static bool sameContent(const std::string& path1, const std::string& path2) {
+  std::string f1;
+  if (!ReadFileToString(path1, &f1)) {
+    printf("Unable to read '%s' content: %m\n", path1.c_str());
+    return false;
+  }
+
+  std::string f2;
+  if (!ReadFileToString(path2, &f2)) {
+    printf("Unable to read '%s' content %m\n", path1.c_str());
+    return false;
+  }
+
+  if (f1.size() != f2.size()) {
+    printf("File '%s' and '%s' are not the same\n", path1.c_str(), path2.c_str());
+    return false;
+  }
+
+  return f1.compare(f2) == 0;
+}
+
 static std::string GetTestPath(const std::string& filename) {
   static std::string test_data_dir = android::base::GetExecutableDirectory() + "/tests/data/";
   return test_data_dir + filename;
@@ -87,3 +109,21 @@
   int verified = verify(dst.c_str(), 4, false, true);
   ASSERT_EQ(0, verified);
 }
+
+TEST(Align, DirectoryEntryDoesNotRequireAlignment) {
+  const std::string src = GetTestPath("archiveWithOneDirectoryEntry.zip");
+  int verified = verify(src.c_str(), 4, false, true);
+  ASSERT_EQ(0, verified);
+}
+
+TEST(Align, DirectoryEntry) {
+  const std::string src = GetTestPath("archiveWithOneDirectoryEntry.zip");
+  const std::string dst = GetTempPath("archiveWithOneDirectoryEntry_out.zip");
+
+  int processed = process(src.c_str(), dst.c_str(), 4, true, false, 4096);
+  ASSERT_EQ(0, processed);
+  ASSERT_EQ(true, sameContent(src, dst));
+
+  int verified = verify(dst.c_str(), 4, false, true);
+  ASSERT_EQ(0, verified);
+}