Merge "Revert "Remove all ZIP64LIMIT hack""
diff --git a/Changes.md b/Changes.md
index 3ad2641..daebd52 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,21 @@
 # Build System Changes for Android.mk Writers
 
+## Python 2 to 3 migration
+
+The path set when running builds now makes the `python` executable point to python 3,
+whereas in previous versions it pointed to python 2. If you still have python 2 scripts,
+you can change the shebang line to use `python2` explicitly. This only applies to
+scripts run directly from makefiles or from soong genrules. This behavior can be
+temporarily overridden by setting the `BUILD_BROKEN_PYTHON_IS_PYTHON2` environment
+variable to `true`. It's an environment variable rather than a product config variable
+because product config sometimes calls python code.
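+
+For example, an explicit python 2 shebang in a script (hypothetical script, shown only for illustration) looks like:
+```
+#!/usr/bin/env python2
+```
+The temporary opt-out can be set in the shell before building:
+```
+export BUILD_BROKEN_PYTHON_IS_PYTHON2=true
+```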
+
+In addition, `python_*` soong modules no longer allow python 2. This can be temporarily
+overridden by setting the `BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES` product configuration
+variable to `true`.
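+
+For example (temporary opt-out only), in the board/product configuration:
+```
+BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES := true
+```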
+
+Python 2 is slated for complete removal in V.
+
 ## Stop referencing sysprop_library directly from cc modules
 
 For the migration to Bazel, we are no longer mapping sysprop_library targets
@@ -818,7 +834,7 @@
 
 ### Stop using clang property
 
-Clang has been deleted from Soong. To fix any build errors, remove the clang
+The clang property has been deleted from Soong. To fix any build errors, remove the clang
 property from affected Android.bp files using bpmodify.
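+
+For example (illustrative module, not part of this change), delete the property:
+```
+cc_binary {
+    name: "my_binary",
+    clang: true,  // remove this property
+}
+```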
 
 
@@ -860,6 +876,39 @@
 the makefile system. If you need one of them, you'll have to set up your own
 version.
 
+## Soong config variables
+
+### Soong config string variables must list all values they can be set to
+
+To facilitate the transition to Bazel, every soong_config_string_variable may only
+be set to a value listed in its `values` property, or to an empty string. Setting
+any other value is a build error.
+
+Example Android.bp:
+```
+soong_config_string_variable {
+    name: "my_string_variable",
+    values: [
+        "foo",
+        "bar",
+    ],
+}
+
+soong_config_module_type {
+    name: "my_cc_defaults",
+    module_type: "cc_defaults",
+    config_namespace: "my_namespace",
+    variables: ["my_string_variable"],
+    properties: [
+        "shared_libs",
+        "static_libs",
+    ],
+}
+```
+Product config:
+```
+$(call soong_config_set,my_namespace,my_string_variable,baz) # Will be an error as baz is not listed in my_string_variable's values.
+```
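+A value from the `values` list (or an empty string) is accepted, for example:
+```
+$(call soong_config_set,my_namespace,my_string_variable,foo) # OK: foo is listed in values.
+```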
 
 [build/soong/Changes.md]: https://android.googlesource.com/platform/build/soong/+/master/Changes.md
 [build/soong/docs/best_practices.md#headers]: https://android.googlesource.com/platform/build/soong/+/master/docs/best_practices.md#headers
diff --git a/OWNERS b/OWNERS
index 8a1cc34..97fda40 100644
--- a/OWNERS
+++ b/OWNERS
@@ -1,4 +1,7 @@
 include platform/build/soong:/OWNERS
 
-# Finalization scripts
-per-file finalize* = smoreland@google.com, alexbuy@google.com
+# Since this file affects all Android developers, lock it down. There is still
+# round-the-world timezone coverage.
+per-file envsetup.sh = joeo@google.com, jingwen@google.com, lberki@google.com
+per-file shell_utils.sh = joeo@google.com, jingwen@google.com, lberki@google.com
+
diff --git a/core/BUILD.bazel b/core/BUILD.bazel
new file mode 100644
index 0000000..3e69e62
--- /dev/null
+++ b/core/BUILD.bazel
@@ -0,0 +1,4 @@
+# Export tradefed templates for tests.
+exports_files(
+    glob(["*.xml"]),
+)
diff --git a/core/Makefile b/core/Makefile
index 77ded9c..85f33bc 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -181,6 +181,7 @@
 
 ifeq ($(HOST_OS),linux)
 $(call dist-for-goals,sdk,$(API_FINGERPRINT))
+$(call dist-for-goals,droidcore,$(API_FINGERPRINT))
 endif
 
 INSTALLED_RECOVERYIMAGE_TARGET :=
@@ -306,6 +307,8 @@
 # $(7): module archive
 # $(8): staging dir for stripped modules
 # $(9): module directory name
+# $(10): extra modules that might be dependencies of modules in this partition, but should not be copied to the output dir
+# $(11): mount point for extra modules
 # Returns a list of src:dest pairs to install the modules using copy-many-files.
 define build-image-kernel-modules
   $(if $(9), \
@@ -317,7 +320,7 @@
       $(eval _src := $(8)/$(notdir $(module))) \
       $(eval $(call copy-and-strip-kernel-module,$(module),$(_src)))) \
     $(_src):$(2)/lib/modules/$(_dir)$(notdir $(module))) \
-  $(eval $(call build-image-kernel-modules-depmod,$(1),$(3),$(4),$(5),$(6),$(7),$(2),$(9))) \
+  $(eval $(call build-image-kernel-modules-depmod,$(1),$(3),$(4),$(5),$(6),$(7),$(2),$(9),$(10),$(11))) \
   $(4)/$(DEPMOD_STAGING_SUBDIR)/modules.dep:$(2)/lib/modules/$(_dir)modules.dep \
   $(4)/$(DEPMOD_STAGING_SUBDIR)/modules.alias:$(2)/lib/modules/$(_dir)modules.alias \
   $(4)/$(DEPMOD_STAGING_SUBDIR)/modules.softdep:$(2)/lib/modules/$(_dir)modules.softdep \
@@ -332,6 +335,8 @@
 # $(6): module archive
 # $(7): output dir
 # $(8): module directory name
+# $(9): extra modules which should not be copied to the output dir, but might be dependencies of modules in this partition
+# $(10): mount point for extra modules
 # TODO(b/144844424): If a module archive is being used, this step (which
 # generates obj/PACKAGING/.../modules.dep) also unzips the module archive into
 # the output directory. This should be moved to a module with a
@@ -341,8 +346,11 @@
 $(3)/$(DEPMOD_STAGING_SUBDIR)/modules.dep: .KATI_IMPLICIT_OUTPUTS := $(3)/$(DEPMOD_STAGING_SUBDIR)/modules.alias $(3)/$(DEPMOD_STAGING_SUBDIR)/modules.softdep $(3)/$(DEPMOD_STAGING_SUBDIR)/$(5)
 $(3)/$(DEPMOD_STAGING_SUBDIR)/modules.dep: $(DEPMOD)
 $(3)/$(DEPMOD_STAGING_SUBDIR)/modules.dep: PRIVATE_MODULES := $(strip $(1))
+$(3)/$(DEPMOD_STAGING_SUBDIR)/modules.dep: PRIVATE_EXTRA_MODULES := $(strip $(9))
 $(3)/$(DEPMOD_STAGING_SUBDIR)/modules.dep: PRIVATE_MOUNT_POINT := $(2)
+$(3)/$(DEPMOD_STAGING_SUBDIR)/modules.dep: PRIVATE_EXTRA_MOUNT_POINT := $(10)
 $(3)/$(DEPMOD_STAGING_SUBDIR)/modules.dep: PRIVATE_MODULE_DIR := $(3)/$(DEPMOD_STAGING_SUBDIR)/$(2)/lib/modules/$(8)
+$(3)/$(DEPMOD_STAGING_SUBDIR)/modules.dep: PRIVATE_EXTRA_MODULE_DIR := $(3)/$(DEPMOD_STAGING_SUBDIR)/$(10)/lib/modules/$(8)
 $(3)/$(DEPMOD_STAGING_SUBDIR)/modules.dep: PRIVATE_STAGING_DIR := $(3)
 $(3)/$(DEPMOD_STAGING_SUBDIR)/modules.dep: PRIVATE_LOAD_MODULES := $(strip $(4))
 $(3)/$(DEPMOD_STAGING_SUBDIR)/modules.dep: PRIVATE_LOAD_FILE := $(3)/$(DEPMOD_STAGING_SUBDIR)/$(5)
@@ -364,6 +372,20 @@
 	    basename $$$$MODULE >> $$(PRIVATE_LOAD_FILE); \
 	  done; \
 	)
+	# The ln -sf + find -delete sequence removes any modules in
+	# PRIVATE_EXTRA_MODULES that have the same basename as modules in
+	# PRIVATE_MODULES. Essentially, it computes a set difference. When a
+	# duplicate module is present in both directories, we want the module in
+	# PRIVATE_MODULES to take precedence. Since depmod does not provide any
+	# guarantee about the ordering of dependency resolution, we achieve this
+	# by manually removing any duplicate modules with lower priority.
+	$(if $(9),\
+	  mkdir -p $$(PRIVATE_EXTRA_MODULE_DIR); \
+	  find $$(PRIVATE_EXTRA_MODULE_DIR) -maxdepth 1 -type f -name "*.ko" -delete; \
+	  cp $$(PRIVATE_EXTRA_MODULES) $$(PRIVATE_EXTRA_MODULE_DIR); \
+	  ln -sf $$(PRIVATE_MODULE_DIR)/*.ko $$(PRIVATE_EXTRA_MODULE_DIR); \
+	  find $$(PRIVATE_EXTRA_MODULE_DIR) -type l -delete; \
+	)
 	$(DEPMOD) -b $$(PRIVATE_STAGING_DIR) 0.0
 	# Turn paths in modules.dep into absolute paths
 	sed -i.tmp -e 's|\([^: ]*lib/modules/[^: ]*\)|/\1|g' $$(PRIVATE_STAGING_DIR)/$$(DEPMOD_STAGING_SUBDIR)/modules.dep
@@ -437,6 +459,9 @@
 # $(4): module load filename
 # $(5): stripped staging directory
 # $(6): kernel module directory name (top is an out of band value for no directory)
+# $(7): list of extra modules that might be dependencies of modules in this partition
+# $(8): mount point for extra modules, e.g. system
+
 define build-image-kernel-modules-dir
 $(if $(filter top,$(6)),\
   $(eval _kver :=)$(eval _sep :=),\
@@ -447,7 +472,12 @@
 $(if $(strip $(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver))$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver))),\
   $(if $(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),,\
     $(eval BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver) := $(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)))) \
-  $(call copy-many-files,$(call build-image-kernel-modules,$(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)),$(2),$(3),$(call intermediates-dir-for,PACKAGING,depmod_$(1)$(_sep)$(_kver)),$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),$(4),$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver)),$(_stripped_staging_dir),$(_kver)))) \
+  $(if $(filter false,$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver))),\
+    $(eval BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver) :=),) \
+  $(eval _files := $(call build-image-kernel-modules,$(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)),$(2),$(3),$(call intermediates-dir-for,PACKAGING,depmod_$(1)$(_sep)$(_kver)),$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),$(4),$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver)),$(_stripped_staging_dir),$(_kver),$(7),$(8))) \
+  $(call copy-many-files,$(_files)) \
+  $(eval _modules := $(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)) ANDROID-GEN ANDROID-GEN ANDROID-GEN ANDROID-GEN) \
+  $(eval KERNEL_MODULE_COPY_FILES += $(join $(addsuffix :,$(_modules)),$(_files)))) \
 $(if $(_kver), \
   $(eval _dir := $(_kver)/), \
   $(eval _dir :=)) \
@@ -460,6 +490,7 @@
   $(eval $(call build-image-kernel-modules-blocklist-file, \
     $(BOARD_$(1)_KERNEL_MODULES_BLOCKLIST_FILE$(_sep)$(_kver)), \
     $(2)/lib/modules/$(_dir)modules.blocklist)) \
+  $(eval ALL_KERNEL_MODULES_BLOCKLIST += $(2)/lib/modules/$(_dir)modules.blocklist) \
   $(2)/lib/modules/$(_dir)modules.blocklist)
 endef
 
@@ -482,6 +513,15 @@
 endef
 
 # $(1): kernel module directory name (top is an out of band value for no directory)
+define build-vendor-kernel-ramdisk-recovery-load
+$(if $(filter top,$(1)),\
+  $(eval _kver :=)$(eval _sep :=),\
+  $(eval _kver := $(1))$(eval _sep :=_))\
+  $(if $(BOARD_VENDOR_KERNEL_RAMDISK_RECOVERY_KERNEL_MODULES_LOAD$(_sep)$(_kver)),\
+    $(call copy-many-files,$(call module-load-list-copy-paths,$(call intermediates-dir-for,PACKAGING,vendor_kernel_ramdisk_recovery_module_list$(_sep)$(_kver)),$(BOARD_VENDOR_KERNEL_RAMDISK_KERNEL_MODULES$(_sep)$(_kver)),$(BOARD_VENDOR_KERNEL_RAMDISK_RECOVERY_KERNEL_MODULES_LOAD$(_sep)$(_kver)),modules.load.recovery,$(TARGET_VENDOR_KERNEL_RAMDISK_OUT))))
+endef
+
+# $(1): kernel module directory name (top is an out of band value for no directory)
 define build-vendor-charger-load
 $(if $(filter top,$(1)),\
   $(eval _kver :=)$(eval _sep :=),\
@@ -532,6 +572,14 @@
 endif
 
 BOARD_KERNEL_MODULE_DIRS += top
+
+# Default to not generating modules.dep for kernel modules on the system
+# side. These modules should only be loaded if vendor-side modules depend
+# on them.
+ifeq ($(BOARD_SYSTEM_KERNEL_MODULES_LOAD),)
+  BOARD_SYSTEM_KERNEL_MODULES_LOAD := false
+endif
+
 $(foreach kmd,$(BOARD_KERNEL_MODULE_DIRS), \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,RECOVERY,$(TARGET_RECOVERY_ROOT_OUT),,modules.load.recovery,$(RECOVERY_STRIPPED_MODULE_STAGING_DIR),$(kmd))) \
   $(eval vendor_ramdisk_fragment := $(KERNEL_MODULE_DIR_VENDOR_RAMDISK_FRAGMENT_$(kmd))) \
@@ -544,13 +592,28 @@
   $(eval $(result_var) += $(call build-image-kernel-modules-dir,VENDOR_RAMDISK,$(output_dir),,modules.load,$(VENDOR_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR_KERNEL_RAMDISK,$(TARGET_VENDOR_KERNEL_RAMDISK_OUT),,modules.load,$(VENDOR_KERNEL_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-recovery-load,$(kmd))) \
-  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR,$(if $(filter true,$(BOARD_USES_VENDOR_DLKMIMAGE)),$(TARGET_OUT_VENDOR_DLKM),$(TARGET_OUT_VENDOR)),vendor,modules.load,$(VENDOR_STRIPPED_MODULE_STAGING_DIR),$(kmd))) \
+  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-kernel-ramdisk-recovery-load,$(kmd))) \
+  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR,$(if $(filter true,$(BOARD_USES_VENDOR_DLKMIMAGE)),$(TARGET_OUT_VENDOR_DLKM),$(TARGET_OUT_VENDOR)),vendor,modules.load,$(VENDOR_STRIPPED_MODULE_STAGING_DIR),$(kmd),$(BOARD_SYSTEM_KERNEL_MODULES),system)) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-charger-load,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(if $(filter true,$(BOARD_USES_ODM_DLKMIMAGE)),$(TARGET_OUT_ODM_DLKM),$(TARGET_OUT_ODM)),odm,modules.load,,$(kmd))) \
+  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT_SYSTEM)),system,modules.load,,$(kmd))) \
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
     $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-recovery-as-boot-load,$(kmd))),\
     $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,GENERIC_RAMDISK,$(TARGET_RAMDISK_OUT),,modules.load,,$(kmd)))))
 
+ifeq ($(BOARD_SYSTEM_KERNEL_MODULES),)
+ifneq ($(BOARD_SYSTEM_DLKM_SRC),)
+ifneq ($(wildcard $(BOARD_SYSTEM_DLKM_SRC)/*),)
+  SYSTEM_KERNEL_MODULES := $(shell find $(BOARD_SYSTEM_DLKM_SRC) -type f)
+  SRC_SYSTEM_KERNEL_MODULES := $(SYSTEM_KERNEL_MODULES)
+  DST_SYSTEM_KERNEL_MODULES := $(patsubst $(BOARD_SYSTEM_DLKM_SRC)/%,:$(TARGET_OUT_SYSTEM_DLKM)/%,$(SRC_SYSTEM_KERNEL_MODULES))
+  SYSTEM_KERNEL_MODULE_COPY_PAIRS := $(join $(SRC_SYSTEM_KERNEL_MODULES),$(DST_SYSTEM_KERNEL_MODULES))
+  ALL_DEFAULT_INSTALLED_MODULES += $(call copy-many-files,$(SYSTEM_KERNEL_MODULE_COPY_PAIRS))
+endif
+endif
+endif
+
+
 # -----------------------------------------------------------------
 # Cert-to-package mapping.  Used by the post-build signing tools.
 # Use a macro to add newline to each echo command
@@ -705,12 +768,6 @@
 $(call dist-for-goals,droidcore-unbundled,$(WALL_WERROR))
 
 # -----------------------------------------------------------------
-# C/C++ flag information for modules
-$(call dist-for-goals,droidcore-unbundled,$(SOONG_MODULES_CFLAG_ARTIFACTS))
-
-$(foreach a,$(SOONG_MODULES_CFLAG_ARTIFACTS),$(call declare-0p-target,$(call word-colon,1,$(a))))
-
-# -----------------------------------------------------------------
 # Modules missing profile files
 PGO_PROFILE_MISSING := $(PRODUCT_OUT)/pgo_profile_file_missing.txt
 $(PGO_PROFILE_MISSING):
@@ -889,16 +946,19 @@
 RAMDISK_EXT := .gz
 endif
 
+# This file describes the /dev nodes added to the generic ramdisk
+RAMDISK_NODE_LIST := $(PRODUCT_OUT)/ramdisk_node_list
+
 # We just build this directly to the install location.
 INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET)
 $(INSTALLED_RAMDISK_TARGET): PRIVATE_DIRS := debug_ramdisk dev metadata mnt proc second_stage_resources sys
-$(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_RAMDISK_FILES) $(INSTALLED_FILES_FILE_RAMDISK) | $(COMPRESSION_COMMAND_DEPS)
+$(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(RAMDISK_NODE_LIST) $(INTERNAL_RAMDISK_FILES) $(INSTALLED_FILES_FILE_RAMDISK) | $(COMPRESSION_COMMAND_DEPS)
 	$(call pretty,"Target ramdisk: $@")
 	$(hide) mkdir -p $(addprefix $(TARGET_RAMDISK_OUT)/,$(PRIVATE_DIRS))
 ifeq (true,$(BOARD_USES_GENERIC_KERNEL_IMAGE))
 	$(hide) mkdir -p $(addprefix $(TARGET_RAMDISK_OUT)/first_stage_ramdisk/,$(PRIVATE_DIRS))
 endif
-	$(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RAMDISK_OUT) | $(COMPRESSION_COMMAND) > $@
+	$(hide) $(MKBOOTFS) -n $(RAMDISK_NODE_LIST) -d $(TARGET_OUT) $(TARGET_RAMDISK_OUT) | $(COMPRESSION_COMMAND) > $@
 
 $(call declare-1p-container,$(INSTALLED_RAMDISK_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_RAMDISK_TARGET),$(INTERNAL_RAMDISK_FILE),$(PRODUCT_OUT)/:/)
@@ -1162,6 +1222,7 @@
 ifeq ($(BOARD_AVB_ENABLE),true)
 $(INSTALLED_BOOTIMAGE_TARGET): $(INTERNAL_PREBUILT_BOOTIMAGE) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
 	cp $(INTERNAL_PREBUILT_BOOTIMAGE) $@
+	chmod +w $@
 	$(AVBTOOL) add_hash_footer \
 	    --image $@ \
 	    $(call get-partition-size-argument,$(BOARD_BOOTIMAGE_PARTITION_SIZE)) \
@@ -1230,10 +1291,11 @@
 ifeq ($(BOARD_AVB_ENABLE),true)
 $(INSTALLED_INIT_BOOT_IMAGE_TARGET): $(INTERNAL_PREBUILT_INIT_BOOT_IMAGE) $(AVBTOOL) $(BOARD_AVB_INIT_BOOT_KEY_PATH)
 	cp $(INTERNAL_PREBUILT_INIT_BOOT_IMAGE) $@
+	chmod +w $@
 	$(AVBTOOL) add_hash_footer \
 	    --image $@ \
 	    $(call get-partition-size-argument,$(BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE)) \
-	    --partition_name boot $(INTERNAL_AVB_INIT_BOOT_SIGNING_ARGS) \
+	    --partition_name init_boot $(INTERNAL_AVB_INIT_BOOT_SIGNING_ARGS) \
 	    $(BOARD_AVB_INIT_BOOT_ADD_HASH_FOOTER_ARGS)
 
 $(call declare-1p-container,$(INSTALLED_INIT_BOOT_IMAGE_TARGET),)
@@ -1579,6 +1641,21 @@
 target_system_dlkm_notice_file_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE_SYSTEM_DLKM.xml.gz
 installed_system_dlkm_notice_xml_gz := $(TARGET_OUT_SYSTEM_DLKM)/etc/NOTICE.xml.gz
 
+ALL_INSTALLED_NOTICE_FILES := \
+  $(installed_notice_html_or_xml_gz) \
+  $(installed_vendor_notice_xml_gz) \
+  $(installed_product_notice_xml_gz) \
+  $(installed_system_ext_notice_xml_gz) \
+  $(installed_odm_notice_xml_gz) \
+  $(installed_vendor_dlkm_notice_xml_gz) \
+  $(installed_odm_dlkm_notice_xml_gz) \
+  $(installed_system_dlkm_notice_xml_gz) \
+
+# $1 installed file path, e.g. out/target/product/vsoc_x86_64/system_ext/etc/NOTICE.xml.gz
+define is-notice-file
+$(if $(findstring $1,$(ALL_INSTALLED_NOTICE_FILES)),Y)
+endef
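+# Illustrative usage (hypothetical): filter a file list down to notice files with
+#   $(foreach f,$(files),$(if $(call is-notice-file,$(f)),$(f)))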
+
 # Notice files are copied to TARGET_OUT_NOTICE_FILES as a side-effect of their module
 # being built. A notice xml file must depend on all modules that could potentially
 # install a license file relevant to it.
@@ -2904,7 +2981,7 @@
 
 endif # BUILDING_DEBUG_BOOT_IMAGE || BUILDING_DEBUG_VENDOR_BOOT_IMAGE
 
-
+PARTITION_COMPAT_SYMLINKS :=
 # Creates a compatibility symlink between two partitions, e.g. /system/vendor to /vendor
 # $1: from location (e.g $(TARGET_OUT)/vendor)
 # $2: destination location (e.g. /vendor)
@@ -2922,6 +2999,7 @@
 	ln -sfn $2 $1
 $1: .KATI_SYMLINK_OUTPUTS := $1
 )
+$(eval PARTITION_COMPAT_SYMLINKS += $1)
 $1
 endef
 
@@ -2949,6 +3027,8 @@
   $(ALL_DEFAULT_INSTALLED_MODULES)))
 
 define fsverity-generate-metadata
+$(call declare-0p-target,$(1).fsv_meta)
+
 $(1).fsv_meta: PRIVATE_SRC := $(1)
 $(1).fsv_meta: PRIVATE_FSVERITY := $(HOST_OUT_EXECUTABLES)/fsverity
 $(1).fsv_meta: $(HOST_OUT_EXECUTABLES)/fsverity_metadata_generator $(HOST_OUT_EXECUTABLES)/fsverity $(1)
@@ -2987,12 +3067,10 @@
     $$(fsverity-metadata-targets-$(2))
 	rm -rf $$(PRIVATE_ASSETS)
 	mkdir -p $$(PRIVATE_ASSETS)
-ifdef fsverity-metadata-targets-$(2)
 	$$< --fsverity-path $$(PRIVATE_FSVERITY) \
 	    --base-dir $$(PRODUCT_OUT) \
 	    --output $$(PRIVATE_ASSETS)/build_manifest.pb \
 	    $$(PRIVATE_INPUTS)
-endif  # fsverity-metadata-targets-$(2)
 	$$(PRIVATE_AAPT2) link -o $$@ \
 	    -A $$(PRIVATE_ASSETS) \
 	    -I $$(PRIVATE_FRAMEWORK_RES) \
@@ -3005,15 +3083,19 @@
 	    --cert $$(PRIVATE_KEY).x509.pem \
 	    --key $$(PRIVATE_KEY).pk8
 
-ALL_DEFAULT_INSTALLED_MODULES += $(1)
+$(1).idsig: $(1)
+
+ALL_DEFAULT_INSTALLED_MODULES += $(1) $(1).idsig
 
 endef  # fsverity-generate-and-install-manifest-apk
 
 $(eval $(call fsverity-generate-and-install-manifest-apk, \
   $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk,system))
+ALL_FSVERITY_BUILD_MANIFEST_APK += $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk.idsig
 ifdef BUILDING_SYSTEM_EXT_IMAGE
   $(eval $(call fsverity-generate-and-install-manifest-apk, \
     $(TARGET_OUT_SYSTEM_EXT)/etc/security/fsverity/BuildManifestSystemExt.apk,system_ext))
+  ALL_FSVERITY_BUILD_MANIFEST_APK += $(TARGET_OUT_SYSTEM_EXT)/etc/security/fsverity/BuildManifestSystemExt.apk $(TARGET_OUT_SYSTEM_EXT)/etc/security/fsverity/BuildManifestSystemExt.apk.idsig
 endif
 
 endif  # PRODUCT_FSVERITY_GENERATE_METADATA
@@ -3028,17 +3110,23 @@
 
 # Create symlink /system/vendor to /vendor if necessary.
 ifdef BOARD_USES_VENDORIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/vendor,/vendor,vendor.img)
+  _vendor_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/vendor,/vendor,vendor.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_vendor_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_vendor_symlink)
 endif
 
 # Create symlink /system/product to /product if necessary.
 ifdef BOARD_USES_PRODUCTIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/product,/product,product.img)
+  _product_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/product,/product,product.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_product_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_product_symlink)
 endif
 
 # Create symlink /system/system_ext to /system_ext if necessary.
 ifdef BOARD_USES_SYSTEM_EXTIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/system_ext,/system_ext,system_ext.img)
+  _systemext_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/system_ext,/system_ext,system_ext.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_systemext_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_systemext_symlink)
 endif
 
 # -----------------------------------------------------------------
@@ -3051,7 +3139,9 @@
 # - /system/lib/modules is a symlink to a directory that stores system DLKMs.
 # - The system_dlkm partition is mounted at /system_dlkm at runtime.
 ifdef BOARD_USES_SYSTEM_DLKMIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/lib/modules,/system_dlkm/lib/modules,system_dlkm.img)
+  _system_dlkm_lib_modules_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/lib/modules,/system_dlkm/lib/modules,system_dlkm.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_system_dlkm_lib_modules_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_system_dlkm_lib_modules_symlink)
 endif
 
 FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
@@ -3071,17 +3161,24 @@
 
 # Install system linker configuration
 # Collect all available stub libraries installed in system and install with predefined linker configuration
+# Also append LLNDK libraries in the APEX as required libs
 SYSTEM_LINKER_CONFIG := $(TARGET_OUT)/etc/linker.config.pb
 SYSTEM_LINKER_CONFIG_SOURCE := $(call intermediates-dir-for,ETC,system_linker_config)/system_linker_config
 $(SYSTEM_LINKER_CONFIG): PRIVATE_SYSTEM_LINKER_CONFIG_SOURCE := $(SYSTEM_LINKER_CONFIG_SOURCE)
-$(SYSTEM_LINKER_CONFIG) : $(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE) | conv_linker_config
+$(SYSTEM_LINKER_CONFIG): $(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE) | conv_linker_config
+	@echo Creating linker config: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
 	$(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $(PRIVATE_SYSTEM_LINKER_CONFIG_SOURCE) \
-	  --output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)"
+		--output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)"
+	$(HOST_OUT_EXECUTABLES)/conv_linker_config append --source $@ --output $@ --key requireLibs \
+		--value "$(foreach lib,$(LLNDK_MOVED_TO_APEX_LIBRARIES), $(lib).so)"
 
 $(call declare-1p-target,$(SYSTEM_LINKER_CONFIG),)
 $(call declare-license-deps,$(SYSTEM_LINKER_CONFIG),$(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE))
 
 FULL_SYSTEMIMAGE_DEPS += $(SYSTEM_LINKER_CONFIG)
+ALL_DEFAULT_INSTALLED_MODULES += $(SYSTEM_LINKER_CONFIG)
 
 # installed file list
 # Depending on anything that $(BUILT_SYSTEMIMAGE) depends on.
@@ -3433,7 +3530,9 @@
 
 # Create symlink /vendor/odm to /odm if necessary.
 ifdef BOARD_USES_ODMIMAGE
-  INTERNAL_VENDORIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/odm,/odm,odm.img)
+  _odm_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/odm,/odm,odm.img)
+  INTERNAL_VENDORIMAGE_FILES += $(_odm_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_odm_symlink)
 endif
 
 # Create symlinks for vendor_dlkm on devices with a vendor_dlkm partition:
@@ -3451,9 +3550,27 @@
 # The vendor DLKMs and other vendor_dlkm files must not be accessed using other paths because they
 # are not guaranteed to exist on all devices.
 ifdef BOARD_USES_VENDOR_DLKMIMAGE
-  INTERNAL_VENDORIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/lib/modules,/vendor_dlkm/lib/modules,vendor_dlkm.img)
+  _vendor_dlkm_lib_modules_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/lib/modules,/vendor_dlkm/lib/modules,vendor_dlkm.img)
+  INTERNAL_VENDORIMAGE_FILES += $(_vendor_dlkm_lib_modules_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_vendor_dlkm_lib_modules_symlink)
 endif
 
+# Install vendor/etc/linker.config.pb with PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS and STUB_LIBRARIES
+vendor_linker_config_file := $(TARGET_OUT_VENDOR)/etc/linker.config.pb
+$(vendor_linker_config_file): private_linker_config_fragments := $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS)
+$(vendor_linker_config_file): $(INTERNAL_VENDORIMAGE_FILES) $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS) | $(HOST_OUT_EXECUTABLES)/conv_linker_config
+	@echo Creating linker config: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
+	$(HOST_OUT_EXECUTABLES)/conv_linker_config proto \
+		--source $(call normalize-path-list,$(private_linker_config_fragments)) \
+		--output $@
+	$(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $@ \
+		--output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT_VENDOR)"
+$(call declare-0p-target,$(vendor_linker_config_file),)
+INTERNAL_VENDORIMAGE_FILES += $(vendor_linker_config_file)
+ALL_DEFAULT_INSTALLED_MODULES += $(vendor_linker_config_file)
+
 INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
 INSTALLED_FILES_JSON_VENDOR := $(INSTALLED_FILES_FILE_VENDOR:.txt=.json)
 $(INSTALLED_FILES_FILE_VENDOR): .KATI_IMPLICIT_OUTPUTS := $(INSTALLED_FILES_JSON_VENDOR)
@@ -3656,7 +3773,9 @@
 # The odm DLKMs and other odm_dlkm files must not be accessed using other paths because they
 # are not guaranteed to exist on all devices.
 ifdef BOARD_USES_ODM_DLKMIMAGE
-  INTERNAL_ODMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_ODM)/lib/modules,/odm_dlkm/lib/modules,odm_dlkm.img)
+  _odm_dlkm_lib_modules_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT_ODM)/lib/modules,/odm_dlkm/lib/modules,odm_dlkm.img)
+  INTERNAL_ODMIMAGE_FILES += $(_odm_dlkm_lib_modules_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_odm_dlkm_lib_modules_symlink)
 endif
 
 INSTALLED_FILES_FILE_ODM := $(PRODUCT_OUT)/installed-files-odm.txt
@@ -3850,7 +3969,6 @@
 $(INSTALLED_FILES_FILE_SYSTEM_DLKM): $(INTERNAL_SYSTEM_DLKMIMAGE_FILES) $(FILESLIST) $(FILESLIST_UTIL)
 	@echo Installed file list: $@
 	mkdir -p $(dir $@)
-	if [ -d "$(BOARD_SYSTEM_DLKM_SRC)" ]; then rsync -rupE $(BOARD_SYSTEM_DLKM_SRC)/ $(TARGET_OUT_SYSTEM_DLKM); fi
 	rm -f $@
 	$(FILESLIST) $(TARGET_OUT_SYSTEM_DLKM) > $(@:.txt=.json)
 	$(FILESLIST_UTIL) -c $(@:.txt=.json) > $@
@@ -3906,6 +4024,7 @@
 ifeq ($(BOARD_AVB_ENABLE),true)
 $(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE) $(AVBTOOL) $(BOARD_AVB_DTBO_KEY_PATH)
 	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
+	chmod +w $@
 	$(AVBTOOL) add_hash_footer \
 	    --image $@ \
 	    $(call get-partition-size-argument,$(BOARD_DTBOIMG_PARTITION_SIZE)) \
@@ -3932,14 +4051,19 @@
 
 INSTALLED_PVMFWIMAGE_TARGET := $(PRODUCT_OUT)/pvmfw.img
 INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET := $(PRODUCT_OUT)/pvmfw_embedded.avbpubkey
+INSTALLED_PVMFW_BINARY_TARGET := $(call module-installed-files,pvmfw_bin)
 INTERNAL_PVMFWIMAGE_FILES := $(call module-target-built-files,pvmfw_img)
-INTERNAL_PVMFW_EMBEDDED_AVBKEY := $(call module-target-built-files,pvmfw_sign_key)
+INTERNAL_PVMFW_EMBEDDED_AVBKEY := $(call module-target-built-files,pvmfw_embedded_key)
+INTERNAL_PVMFW_SYMBOL := $(TARGET_OUT_EXECUTABLES_UNSTRIPPED)/pvmfw
 
 $(call declare-1p-container,$(INSTALLED_PVMFWIMAGE_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_PVMFWIMAGE_TARGET),$(INTERNAL_PVMFWIMAGE_FILES),$(PRODUCT_OUT)/:/)
 
 UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_PVMFWIMAGE_TARGET)
 
+# Place the unstripped pvmfw image in the symbols directory
+$(INTERNAL_PVMFWIMAGE_FILES): |$(INTERNAL_PVMFW_SYMBOL)
+
 $(eval $(call copy-one-file,$(INTERNAL_PVMFWIMAGE_FILES),$(INSTALLED_PVMFWIMAGE_TARGET)))
 
 $(INSTALLED_PVMFWIMAGE_TARGET): $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET)
@@ -4036,7 +4160,8 @@
 
 INTERNAL_AVB_PARTITIONS_IN_CHAINED_VBMETA_IMAGES := \
     $(BOARD_AVB_VBMETA_SYSTEM) \
-    $(BOARD_AVB_VBMETA_VENDOR)
+    $(BOARD_AVB_VBMETA_VENDOR) \
+    $(foreach partition,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS),$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))))
 
 # Not allowing the same partition to appear in multiple groups.
 ifneq ($(words $(sort $(INTERNAL_AVB_PARTITIONS_IN_CHAINED_VBMETA_IMAGES))),$(words $(INTERNAL_AVB_PARTITIONS_IN_CHAINED_VBMETA_IMAGES)))
@@ -4342,6 +4467,11 @@
 $(eval $(call check-and-set-avb-args,vbmeta_vendor))
 endif
 
+ifdef BOARD_AVB_VBMETA_CUSTOM_PARTITIONS
+$(foreach partition,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS),$(eval $(call check-and-set-avb-args,vbmeta_$(partition))))
+$(foreach partition,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS),$(eval BOARD_AVB_MAKE_VBMETA_$(call to-upper,$(partition))_IMAGE_ARGS += --padding_size 4096))
+endif
+
 ifneq ($(strip $(BOARD_CUSTOMIMAGES_PARTITION_LIST)),)
 $(foreach partition,$(BOARD_CUSTOMIMAGES_PARTITION_LIST), \
     $(eval $(call check-and-set-custom-avb-chain-args,$(partition))))
@@ -4370,6 +4500,13 @@
     --rollback_index $(BOARD_AVB_VBMETA_VENDOR_ROLLBACK_INDEX)
 endif
 
+ifdef BOARD_AVB_VBMETA_CUSTOM_PARTITIONS
+  $(foreach partition,$(call to-upper,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS)), \
+      $(if $(BOARD_AVB_VBMETA_$(partition)_ROLLBACK_INDEX),$(eval \
+        BOARD_AVB_MAKE_VBMETA_$(partition)_IMAGE_ARGS += \
+          --rollback_index $(BOARD_AVB_VBMETA_$(partition)_ROLLBACK_INDEX))))
+endif
+
 # $(1): the directory to extract public keys to
 define extract-avb-chain-public-keys
   $(if $(BOARD_AVB_BOOT_KEY_PATH),\
@@ -4426,7 +4563,11 @@
   $(if $(BOARD_CUSTOMIMAGES_PARTITION_LIST),\
     $(hide) $(foreach partition,$(BOARD_CUSTOMIMAGES_PARTITION_LIST), \
         $(AVBTOOL) extract_public_key --key $(BOARD_AVB_$(call to-upper,$(partition))_KEY_PATH) \
-            --output $(1)/$(partition).avbpubkey;))
+            --output $(1)/$(partition).avbpubkey;)) \
+  $(if $(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS),\
+    $(hide) $(foreach partition,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS), \
+        $(AVBTOOL) extract_public_key --key $(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_KEY_PATH) \
+            --output $(1)/vbmeta_$(partition).avbpubkey;))
 endef
 
 # Builds a chained VBMeta image. This VBMeta image will contain the descriptors for the partitions
@@ -4438,13 +4579,13 @@
 # $(1): VBMeta image name, such as "vbmeta_system", "vbmeta_vendor" etc.
 # $(2): Output filename.
 define build-chained-vbmeta-image
-  $(call pretty,"Target chained vbmeta image: $@")
-  $(hide) $(AVBTOOL) make_vbmeta_image \
-      $(INTERNAL_AVB_$(call to-upper,$(1))_SIGNING_ARGS) \
-      $(BOARD_AVB_MAKE_$(call to-upper,$(1))_IMAGE_ARGS) \
-      $(foreach image,$(BOARD_AVB_$(call to-upper,$(1))), \
-          --include_descriptors_from_image $(call images-for-partitions,$(image))) \
-      --output $@
+	$(call pretty,"Target chained vbmeta image: $@")
+	$(hide) $(AVBTOOL) make_vbmeta_image \
+	    $(INTERNAL_AVB_$(call to-upper,$(1))_SIGNING_ARGS) \
+	    $(BOARD_AVB_MAKE_$(call to-upper,$(1))_IMAGE_ARGS) \
+	    $(foreach image,$(BOARD_AVB_$(call to-upper,$(1))), \
+	        --include_descriptors_from_image $(call images-for-partitions,$(image))) \
+	    --output $@
 endef
 
 ifdef BUILDING_SYSTEM_IMAGE
@@ -4472,7 +4613,26 @@
 
 $(call declare-1p-container,$(INSTALLED_VBMETA_VENDORIMAGE_TARGET),)
 
-UNMOUNTED_NOTICE_VENDOR_DEPS+= $(INSTALLED_VBMETA_VENDORIMAGE_TARGET)
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_VBMETA_VENDORIMAGE_TARGET)
+endif
+
+ifdef BOARD_AVB_VBMETA_CUSTOM_PARTITIONS
+define declare-custom-vbmeta-target
+INSTALLED_VBMETA_$(call to-upper,$(1))IMAGE_TARGET := $(PRODUCT_OUT)/vbmeta_$(call to-lower,$(1)).img
+$$(INSTALLED_VBMETA_$(call to-upper,$(1))IMAGE_TARGET): \
+	    $(AVBTOOL) \
+	    $(call images-for-partitions,$(BOARD_AVB_VBMETA_$(call to-upper,$(1)))) \
+	    $(BOARD_AVB_VBMETA_$(call to-upper,$(1))_KEY_PATH)
+	$$(call build-chained-vbmeta-image,vbmeta_$(call to-lower,$(1)))
+
+$(call declare-1p-container,$(INSTALLED_VBMETA_$(call to-upper,$(1))IMAGE_TARGET),)
+
+UNMOUNTED_NOTICE_VENDOR_DEPS += $(INSTALLED_VBMETA_$(call to-upper,$(1))IMAGE_TARGET)
+endef
+
+$(foreach partition,\
+          $(call to-upper,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS)),\
+          $(eval $(call declare-custom-vbmeta-target,$(partition))))
 endif
 
 define build-vbmetaimage-target
@@ -4492,6 +4652,7 @@
 $(INSTALLED_VBMETAIMAGE_TARGET): PRIVATE_AVB_VBMETA_SIGNING_ARGS := \
     --algorithm $(BOARD_AVB_ALGORITHM) --key $(BOARD_AVB_KEY_PATH)
 
+
 $(INSTALLED_VBMETAIMAGE_TARGET): \
 	    $(AVBTOOL) \
 	    $(INSTALLED_BOOTIMAGE_TARGET) \
@@ -4512,8 +4673,10 @@
 	    $(INSTALLED_RECOVERYIMAGE_TARGET) \
 	    $(INSTALLED_VBMETA_SYSTEMIMAGE_TARGET) \
 	    $(INSTALLED_VBMETA_VENDORIMAGE_TARGET) \
+	    $(foreach partition,$(call to-upper,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS)),$(INSTALLED_VBMETA_$(partition)IMAGE_TARGET)) \
 	    $(BOARD_AVB_VBMETA_SYSTEM_KEY_PATH) \
 	    $(BOARD_AVB_VBMETA_VENDOR_KEY_PATH) \
+	    $(foreach partition,$(call to-upper,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS)),$(BOARD_AVB_VBMETA_$(partition)_KEY_PATH)) \
 	    $(BOARD_AVB_KEY_PATH)
 	$(build-vbmetaimage-target)
 
@@ -4542,6 +4705,7 @@
     $(INTERNAL_VENDOR_DLKMIMAGE_FILES) \
     $(INTERNAL_ODM_DLKMIMAGE_FILES) \
     $(INTERNAL_SYSTEM_DLKMIMAGE_FILES) \
+    $(INTERNAL_PVMFWIMAGE_FILES) \
 
 # -----------------------------------------------------------------
 # Check VINTF of build
@@ -4554,35 +4718,39 @@
 
 APEX_OUT := $(PRODUCT_OUT)/apex
 # -----------------------------------------------------------------
-# Create apex-info-file.xsd
+# Create apex-info-file.xml
 
-APEX_DIRS := \
+apex_dirs := \
   $(TARGET_OUT)/apex/% \
   $(TARGET_OUT_SYSTEM_EXT)/apex/% \
   $(TARGET_OUT_VENDOR)/apex/% \
   $(TARGET_OUT_ODM)/apex/% \
   $(TARGET_OUT_PRODUCT)/apex/% \
 
-apex_vintf_files := $(sort $(filter $(APEX_DIRS), $(INTERNAL_ALLIMAGES_FILES)))
-APEX_INFO_FILE   := $(APEX_OUT)/apex-info-list.xml
+apex_files := $(sort $(filter $(apex_dirs), $(INTERNAL_ALLIMAGES_FILES)))
+APEX_INFO_FILE := $(APEX_OUT)/apex-info-list.xml
 
-$(APEX_INFO_FILE): $(HOST_OUT_EXECUTABLES)/dump_apex_info $(apex_vintf_files)
+# dump_apex_info scans $(PRODUCT_OUT)/apex and writes apex-info-list.xml there.
+# This relies on the fact that rules for .apex files install the contents in $(PRODUCT_OUT)/apex.
+$(APEX_INFO_FILE): $(HOST_OUT_EXECUTABLES)/dump_apex_info $(apex_files)
 	@echo "Creating apex-info-file in $(PRODUCT_OUT) "
-	$< --root_dir $(PRODUCT_OUT) --out_file $@
+	$< --root_dir $(PRODUCT_OUT)
 
-apex_vintf_files :=
+apex_files :=
+apex_dirs :=
 
 # The build system only writes VINTF metadata to */etc/vintf paths. Legacy paths aren't needed here
 # because they are only used for prebuilt images.
+# APEX files in /vendor/apex can have VINTF fragments as well.
 check_vintf_common_srcs_patterns := \
   $(TARGET_OUT)/etc/vintf/% \
   $(TARGET_OUT_VENDOR)/etc/vintf/% \
   $(TARGET_OUT_ODM)/etc/vintf/% \
   $(TARGET_OUT_PRODUCT)/etc/vintf/% \
   $(TARGET_OUT_SYSTEM_EXT)/etc/vintf/% \
+  $(TARGET_OUT_VENDOR)/apex/% \
 
 check_vintf_common_srcs := $(sort $(filter $(check_vintf_common_srcs_patterns),$(INTERNAL_ALLIMAGES_FILES)))
-check_vintf_common_srcs += $(APEX_INFO_FILE)
 check_vintf_common_srcs_patterns :=
 
 check_vintf_has_system :=
@@ -4604,7 +4772,7 @@
 check_vintf_all_deps += $(check_vintf_system_log)
 $(check_vintf_system_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_system_deps)
 	@( $< --check-one --dirmap /system:$(TARGET_OUT) > $@ 2>&1 ) || ( cat $@ && exit 1 )
-$(call declare-0p-target,$(check_vintf_system_log))
+$(call declare-1p-target,$(check_vintf_system_log))
 check_vintf_system_log :=
 
 # -- Check framework manifest against frozen manifests for GSI targets. They need to be compatible.
@@ -4616,14 +4784,15 @@
 	@( $< --check --dirmap /system:$(TARGET_OUT) \
 	  $(VINTF_FRAMEWORK_MANIFEST_FROZEN_DIR) > $@ 2>&1 ) || ( cat $@ && exit 1 )
 
-$(call declare-0p-target,$(vintffm_log))
+$(call declare-1p-target,$(vintffm_log))
 
 endif # check_vintf_system_deps
 check_vintf_system_deps :=
 
 # -- Check vendor manifest / matrix including fragments (excluding other device manifests / matrices)
 check_vintf_vendor_deps := $(filter $(TARGET_OUT_VENDOR)/etc/vintf/%, $(check_vintf_common_srcs))
-ifneq ($(check_vintf_vendor_deps),)
+check_vintf_vendor_deps += $(filter $(TARGET_OUT_VENDOR)/apex/%, $(check_vintf_common_srcs))
+ifneq ($(strip $(check_vintf_vendor_deps)),)
 check_vintf_has_vendor := true
 check_vintf_vendor_log := $(intermediates)/check_vintf_vendor.log
 check_vintf_all_deps += $(check_vintf_vendor_log)
@@ -4634,12 +4803,12 @@
   $(if $(DEVICE_MANIFEST_FILE),EMPTY_VENDOR_SKU_PLACEHOLDER,\
     $(if $(DEVICE_MANIFEST_SKUS),,EMPTY_VENDOR_SKU_PLACEHOLDER)) \
   $(DEVICE_MANIFEST_SKUS)
-$(check_vintf_vendor_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_vendor_deps)
+$(check_vintf_vendor_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_vendor_deps) $(APEX_INFO_FILE)
 	$(foreach vendor_sku,$(PRIVATE_VENDOR_SKUS), \
-	  ( $< --check-one --dirmap /vendor:$(TARGET_OUT_VENDOR) \
+	  ( $< --check-one --dirmap /vendor:$(TARGET_OUT_VENDOR) --dirmap /apex:$(APEX_OUT) \
 	       --property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
 	       > $@ 2>&1 ) || ( cat $@ && exit 1 ); )
-$(call declare-0p-target,$(check_vintf_vendor_log))
+$(call declare-1p-target,$(check_vintf_vendor_log))
 check_vintf_vendor_log :=
 endif # check_vintf_vendor_deps
 check_vintf_vendor_deps :=
@@ -4661,8 +4830,8 @@
 $(BUILT_KERNEL_VERSION_FILE):
 	echo $(BOARD_KERNEL_VERSION) > $@
 
-$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
-$(call declare-0p-target,$(BUILT_KERNEL_VERSION_FILE))
+$(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
+$(call declare-license-metadata,$(BUILT_KERNEL_VERSION_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
 
 my_board_extracted_kernel := true
 endif # BOARD_KERNEL_VERSION
@@ -4687,7 +4856,7 @@
 	  --output-configs $@ \
 	  --output-release $(BUILT_KERNEL_VERSION_FILE)
 
-$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+$(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
 
 my_board_extracted_kernel := true
 endif # INSTALLED_KERNEL_TARGET
@@ -4708,7 +4877,7 @@
 	  --output-configs $@ \
 	  --output-release $(BUILT_KERNEL_VERSION_FILE)
 
-$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+$(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
 
 my_board_extracted_kernel := true
 endif # INSTALLED_BOOTIMAGE_TARGET
@@ -4750,7 +4919,7 @@
 check_vintf_all_deps += $(check_vintf_compatible_log)
 
 check_vintf_compatible_args :=
-check_vintf_compatible_deps := $(check_vintf_common_srcs)
+check_vintf_compatible_deps := $(check_vintf_common_srcs) $(APEX_INFO_FILE)
 
 ifeq ($(PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS),true)
 ifneq (,$(BUILT_KERNEL_VERSION_FILE)$(BUILT_KERNEL_CONFIGS_FILE))
@@ -4770,7 +4939,6 @@
 ifdef PRODUCT_SHIPPING_API_LEVEL
 check_vintf_compatible_args += --property ro.product.first_api_level=$(PRODUCT_SHIPPING_API_LEVEL)
 endif # PRODUCT_SHIPPING_API_LEVEL
-check_vintf_compatible_args += --apex-info-file $(APEX_INFO_FILE)
 
 $(check_vintf_compatible_log): PRIVATE_CHECK_VINTF_ARGS := $(check_vintf_compatible_args)
 $(check_vintf_compatible_log): PRIVATE_CHECK_VINTF_DEPS := $(check_vintf_compatible_deps)
@@ -4801,7 +4969,7 @@
 	       --property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
 	       >> $@ 2>&1 ) || (cat $@ && exit 1); ))
 
-$(call declare-0p-target,$(check_vintf_compatible_log))
+$(call declare-1p-target,$(check_vintf_compatible_log))
 
 check_vintf_compatible_log :=
 check_vintf_compatible_args :=
@@ -4866,7 +5034,7 @@
 	  $(call intermediates-dir-for,PACKAGING,check-all-partition-sizes)/misc_info.txt, \
 	  $@)
 
-$(call declare-0p-target,$(check_all_partition_sizes_log))
+$(call declare-1p-target,$(check_all_partition_sizes_log))
 
 .PHONY: check-all-partition-sizes
 check-all-partition-sizes: $(check_all_partition_sizes_log)
@@ -4968,6 +5136,7 @@
   img2simg \
   img_from_target_files \
   imgdiff \
+  initrd_bootconfig \
   libconscrypt_openjdk_jni \
   lpmake \
   lpunpack \
@@ -5008,21 +5177,26 @@
   verity_verifier \
   zipalign \
   zucchini \
+  zip2zip \
+
 
 # Additional tools to unpack and repack the apex file.
 INTERNAL_OTATOOLS_MODULES += \
   apexer \
   apex_compression_tool \
+  blkid_static \
   deapexer \
   debugfs_static \
   dump_apex_info \
+  fsck.erofs \
+  make_erofs \
   merge_zips \
   resize2fs \
   soong_zip \
 
 ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
 INTERNAL_OTATOOLS_MODULES += \
-  futility \
+  futility-host \
   vboot_signer
 endif
 
@@ -5045,6 +5219,12 @@
   $(sort $(shell find build/make/target/product/security -type f -name "*.x509.pem" -o \
       -name "*.pk8"))
 
+ifneq (,$(wildcard packages/modules))
+INTERNAL_OTATOOLS_PACKAGE_FILES += \
+  $(sort $(shell find packages/modules -type f -name "*.x509.pem" -o -name "*.pk8" -o -name \
+      "key.pem"))
+endif
+
 ifneq (,$(wildcard device))
 INTERNAL_OTATOOLS_PACKAGE_FILES += \
   $(sort $(shell find device $(wildcard vendor) -type f -name "*.pk8" -o -name "verifiedboot*" -o \
@@ -5084,6 +5264,10 @@
 .PHONY: otatools-package
 otatools-package: $(BUILT_OTATOOLS_PACKAGE)
 
+$(call dist-for-goals, otatools-package, \
+  $(BUILT_OTATOOLS_PACKAGE) \
+)
+
 endif # build_otatools_package
 
 # -----------------------------------------------------------------
@@ -5252,6 +5436,15 @@
 	$(hide) echo "avb_vbmeta_vendor_algorithm=$(BOARD_AVB_VBMETA_VENDOR_ALGORITHM)" >> $@
 	$(hide) echo "avb_vbmeta_vendor_rollback_index_location=$(BOARD_AVB_VBMETA_VENDOR_ROLLBACK_INDEX_LOCATION)" >> $@
 endif # BOARD_AVB_VBMETA_VENDOR_KEY_PATH
+ifneq (,$(strip $(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS)))
+	$(hide) echo "avb_custom_vbmeta_images_partition_list=$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS)" >> $@
+	$(hide) $(foreach partition,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS),\
+	echo "avb_vbmeta_$(partition)=$(BOARD_AVB_VBMETA_$(call to-upper,$(partition)))" >> $@ ;\
+	echo "avb_vbmeta_$(partition)_args=$(BOARD_AVB_MAKE_VBMETA_$(call to-upper,$(partition))_IMAGE_ARGS)" >> $@ ;\
+	echo "avb_vbmeta_$(partition)_key_path=$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_KEY_PATH)" >> $@ ;\
+	echo "avb_vbmeta_$(partition)_algorithm=$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_ALGORITHM)" >> $@ ;\
+	echo "avb_vbmeta_$(partition)_rollback_index_location=$(BOARD_AVB_VBMETA_$(call to-upper,$(partition))_ROLLBACK_INDEX_LOCATION)" >> $@ ;)
+endif # BOARD_AVB_VBMETA_CUSTOM_PARTITIONS
 endif # BOARD_AVB_ENABLE
 ifdef BOARD_BPT_INPUT_FILES
 	$(hide) echo "board_bpt_enable=true" >> $@
@@ -5622,6 +5815,7 @@
 	    $(INSTALLED_CACHEIMAGE_TARGET) \
 	    $(INSTALLED_DTBOIMAGE_TARGET) \
 	    $(INSTALLED_PVMFWIMAGE_TARGET) \
+	    $(INSTALLED_PVMFW_BINARY_TARGET) \
 	    $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET) \
 	    $(INSTALLED_CUSTOMIMAGES_TARGET) \
 	    $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
@@ -5972,6 +6166,8 @@
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
 	$(hide) cp $(INSTALLED_PVMFW_EMBEDDED_AVBKEY_TARGET) $(zip_root)/PREBUILT_IMAGES/
+	$(hide) mkdir -p $(zip_root)/PVMFW
+	$(hide) cp $(INSTALLED_PVMFW_BINARY_TARGET) $(zip_root)/PVMFW/
 endif
 ifdef BOARD_PREBUILT_BOOTLOADER
 	$(hide) mkdir -p $(zip_root)/IMAGES
@@ -6020,6 +6216,7 @@
 ifdef BUILDING_INIT_BOOT_IMAGE
 	$(hide) $(call package_files-copy-root, $(TARGET_RAMDISK_OUT),$(zip_root)/INIT_BOOT/RAMDISK)
 	$(hide) $(call fs_config,$(zip_root)/INIT_BOOT/RAMDISK,) > $(zip_root)/META/init_boot_filesystem_config.txt
+	$(hide) cp $(RAMDISK_NODE_LIST) $(zip_root)/META/ramdisk_node_list
 ifdef BOARD_KERNEL_PAGESIZE
 	$(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/INIT_BOOT/pagesize
 endif # BOARD_KERNEL_PAGESIZE
@@ -6824,12 +7021,26 @@
 	$(HOST_OUT_EXECUTABLES)/atree \
 	$(HOST_OUT_EXECUTABLES)/line_endings
 
+# The name of the subdir within the platforms dir of the sdk. One of:
+# - android-<SDK_INT> (stable base dessert SDKs)
+# - android-<CODENAME> (codename SDKs)
+# - android-<SDK_INT>-ext<EXT_INT> (stable extension SDKs)
+sdk_platform_dir_name := $(strip \
+  $(if $(filter REL,$(PLATFORM_VERSION_CODENAME)), \
+    $(if $(filter $(PLATFORM_SDK_EXTENSION_VERSION),$(PLATFORM_BASE_SDK_EXTENSION_VERSION)), \
+      android-$(PLATFORM_SDK_VERSION), \
+      android-$(PLATFORM_SDK_VERSION)-ext$(PLATFORM_SDK_EXTENSION_VERSION) \
+    ), \
+    android-$(PLATFORM_VERSION_CODENAME) \
+  ) \
+)
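+# For example (illustrative values only): android-33, android-33-ext4, or
+# android-UpsideDownCake.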
+
 INTERNAL_SDK_TARGET := $(sdk_dir)/$(sdk_name).zip
 $(INTERNAL_SDK_TARGET): PRIVATE_NAME := $(sdk_name)
 $(INTERNAL_SDK_TARGET): PRIVATE_DIR := $(sdk_dir)/$(sdk_name)
 $(INTERNAL_SDK_TARGET): PRIVATE_DEP_FILE := $(sdk_dep_file)
 $(INTERNAL_SDK_TARGET): PRIVATE_INPUT_FILES := $(sdk_atree_files)
-
+$(INTERNAL_SDK_TARGET): PRIVATE_PLATFORM_NAME := $(sdk_platform_dir_name)
 # Set SDK_GNU_ERROR to non-empty to fail when a GNU target is built.
 #
 #SDK_GNU_ERROR := true
@@ -6854,7 +7065,7 @@
 	        -I $(PRODUCT_OUT) \
 	        -I $(HOST_OUT) \
 	        -I $(TARGET_COMMON_OUT_ROOT) \
-	        -v "PLATFORM_NAME=android-$(PLATFORM_VERSION)" \
+	        -v "PLATFORM_NAME=$(PRIVATE_PLATFORM_NAME)" \
 	        -v "OUT_DIR=$(OUT_DIR)" \
 	        -v "HOST_OUT=$(HOST_OUT)" \
 	        -v "TARGET_ARCH=$(TARGET_ARCH)" \
diff --git a/core/OWNERS b/core/OWNERS
index d48ceab..eb1d5c3 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,5 +1,3 @@
-per-file *dex_preopt*.* = ngeoffray@google.com,skvadrik@google.com
-per-file verify_uses_libraries.sh = ngeoffray@google.com,skvadrik@google.com
 
 # For global Proguard rules
 per-file proguard*.flags = jdduke@google.com
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 9f305cf..718adb5 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -41,6 +41,37 @@
 # MODULE_BUILD_FROM_SOURCE.
 BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
 
+ifneq ($(SANITIZE_TARGET)$(EMMA_INSTRUMENT_FRAMEWORK),)
+  # Always use sources when building the framework with Java coverage or
+  # sanitized builds as they both require purpose built prebuilts which we do
+  # not provide.
+  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+endif
+
+# ART does not provide linux_bionic variants needed for products that
+# set HOST_CROSS_OS=linux_bionic.
+ifeq (linux_bionic,${HOST_CROSS_OS})
+  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+endif
+
+# ART does not provide host side arm64 variants needed for products that
+# set HOST_CROSS_ARCH=arm64.
+ifeq (arm64,${HOST_CROSS_ARCH})
+  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+endif
+
+# TV-based devices do not seem to work with prebuilts, so build from source
+# for now and fix in a follow-up.
+ifneq (,$(filter tv,$(subst $(comma),$(space),${PRODUCT_CHARACTERISTICS})))
+  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+endif
+
+# ATV-based devices do not seem to work with prebuilts, so build from source
+# for now and fix in a follow-up.
+ifneq (,${PRODUCT_IS_ATV})
+  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+endif
+
 ifneq (,$(MODULE_BUILD_FROM_SOURCE))
   # Keep an explicit setting.
 else ifeq (,$(filter docs sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES)))
@@ -71,12 +102,17 @@
 
 $(call soong_config_set,art_module,source_build,$(ART_MODULE_BUILD_FROM_SOURCE))
 
+ifdef TARGET_BOARD_AUTO
+  $(call add_soong_config_var_value, ANDROID, target_board_auto, $(TARGET_BOARD_AUTO))
+endif
+
 # Ensure that those mainline modules who have individually toggleable prebuilts
 # are controlled by the MODULE_BUILD_FROM_SOURCE environment variable by
 # default.
 INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES := \
   bluetooth \
   permission \
+  rkpd \
   uwb \
   wifi \
 
@@ -87,6 +123,10 @@
 # Apex build mode variables
 ifdef APEX_BUILD_FOR_PRE_S_DEVICES
 $(call add_soong_config_var_value,ANDROID,library_linking_strategy,prefer_static)
+else
+ifdef KEEP_APEX_INHERIT
+$(call add_soong_config_var_value,ANDROID,library_linking_strategy,prefer_static)
+endif
 endif
 
 ifeq (true,$(MODULE_BUILD_FROM_SOURCE))
@@ -98,10 +138,18 @@
 $(call soong_config_set,messaging,build_variant_eng,true)
 endif
 
-# TODO(b/203088572): Remove when Java optimizations enabled by default for
-# SystemUI.
+# Enable SystemUI optimizations by default unless explicitly set.
+SYSTEMUI_OPTIMIZE_JAVA ?= true
 $(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA)
 
+# Disable Compose in SystemUI by default.
+SYSTEMUI_USE_COMPOSE ?= false
+$(call add_soong_config_var,ANDROID,SYSTEMUI_USE_COMPOSE)
+
+ifdef PRODUCT_AVF_ENABLED
+$(call add_soong_config_var_value,ANDROID,avf_enabled,$(PRODUCT_AVF_ENABLED))
+endif
+
 # Enable system_server optimizations by default unless explicitly set or if
 # there may be dependent runtime jars.
 # TODO(b/240588226): Remove the off-by-default exceptions after handling
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index eb429cd..9fab44d 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -302,3 +302,7 @@
 
 endif # LOCAL_PACKAGE_SPLITS
 
+###########################################################
+## SBOM generation
+###########################################################
+include $(BUILD_SBOM_GEN)
\ No newline at end of file
diff --git a/core/art_config.mk b/core/art_config.mk
new file mode 100644
index 0000000..1ea05db
--- /dev/null
+++ b/core/art_config.mk
@@ -0,0 +1,46 @@
+# ART configuration that has to be determined after product config is resolved.
+#
+# Inputs:
+# PRODUCT_ENABLE_UFFD_GC: See comments in build/make/core/product.mk.
+# OVERRIDE_ENABLE_UFFD_GC: Overrides PRODUCT_ENABLE_UFFD_GC. Can be passed from the commandline for
+# debugging purposes.
+# BOARD_API_LEVEL: See comments in build/make/core/main.mk.
+# BOARD_SHIPPING_API_LEVEL: See comments in build/make/core/main.mk.
+# PRODUCT_SHIPPING_API_LEVEL: See comments in build/make/core/product.mk.
+#
+# Outputs:
+# ENABLE_UFFD_GC: Whether to use userfaultfd GC.
+
+config_enable_uffd_gc := \
+  $(firstword $(OVERRIDE_ENABLE_UFFD_GC) $(PRODUCT_ENABLE_UFFD_GC))
+
+ifeq (,$(filter-out default,$(config_enable_uffd_gc)))
+  ENABLE_UFFD_GC := true
+
+  # Disable userfaultfd GC if the device doesn't support it (i.e., if
+  # `min(ro.board.api_level ?? ro.board.first_api_level ?? MAX_VALUE,
+  #      ro.product.first_api_level ?? ro.build.version.sdk ?? MAX_VALUE) < 31`)
+  # This logic aligns with how `ro.vendor.api_level` is calculated in
+  # `system/core/init/property_service.cpp`.
+  # We omit the check on `ro.build.version.sdk` here because we are on the latest build system.
+  board_api_level := $(firstword $(BOARD_API_LEVEL) $(BOARD_SHIPPING_API_LEVEL))
+  ifneq (,$(board_api_level))
+    ifeq (true,$(call math_lt,$(board_api_level),31))
+      ENABLE_UFFD_GC := false
+    endif
+  endif
+
+  ifneq (,$(PRODUCT_SHIPPING_API_LEVEL))
+    ifeq (true,$(call math_lt,$(PRODUCT_SHIPPING_API_LEVEL),31))
+      ENABLE_UFFD_GC := false
+    endif
+  endif
+else ifeq (true,$(config_enable_uffd_gc))
+  ENABLE_UFFD_GC := true
+else ifeq (false,$(config_enable_uffd_gc))
+  ENABLE_UFFD_GC := false
+else
+  $(error Unknown PRODUCT_ENABLE_UFFD_GC value: $(config_enable_uffd_gc))
+endif
+
+ADDITIONAL_PRODUCT_PROPERTIES += ro.dalvik.vm.enable_uffd_gc=$(ENABLE_UFFD_GC)
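+
+# Example (debugging only, per the inputs documented above): the override can be
+# passed on the command line, e.g. `m OVERRIDE_ENABLE_UFFD_GC=true`.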
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 00f5f21..c453469 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -600,7 +600,11 @@
       # Manually handle the case where the
       # output file is in the recovery or ramdisk partition.
       ifneq (,$(filter $(TARGET_RECOVERY_ROOT_OUT)/%,$(my_module_path)))
-        my_init_rc_path := $(TARGET_RECOVERY_ROOT_OUT)/system/etc
+        ifneq (,$(filter $(TARGET_RECOVERY_ROOT_OUT)/first_stage_ramdisk/%,$(my_module_path)))
+            my_init_rc_path := $(TARGET_RECOVERY_ROOT_OUT)/first_stage_ramdisk/system/etc
+        else
+            my_init_rc_path := $(TARGET_RECOVERY_ROOT_OUT)/system/etc
+        endif
       else ifneq (,$(filter $(TARGET_RAMDISK_OUT)/%,$(my_module_path)))
         my_init_rc_path := $(TARGET_RAMDISK_OUT)/system/etc
       else
@@ -712,6 +716,15 @@
 ## Compatibility suite files.
 ###########################################################
 ifdef LOCAL_COMPATIBILITY_SUITE
+
+ifneq (,$(LOCAL_FULL_TEST_CONFIG))
+  test_config := $(LOCAL_FULL_TEST_CONFIG)
+else ifneq (,$(LOCAL_TEST_CONFIG))
+  test_config := $(LOCAL_PATH)/$(LOCAL_TEST_CONFIG)
+else
+  test_config := $(wildcard $(LOCAL_PATH)/AndroidTest.xml)
+endif
+
 ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
 
 # If we are building a native test or benchmark and its stem variants are not defined,
@@ -758,13 +771,6 @@
 
 
 # Auto-generate build config.
-ifneq (,$(LOCAL_FULL_TEST_CONFIG))
-  test_config := $(LOCAL_FULL_TEST_CONFIG)
-else ifneq (,$(LOCAL_TEST_CONFIG))
-  test_config := $(LOCAL_PATH)/$(LOCAL_TEST_CONFIG)
-else
-  test_config := $(wildcard $(LOCAL_PATH)/AndroidTest.xml)
-endif
 ifeq (,$(test_config))
   ifneq (true,$(is_native))
     is_instrumentation_test := true
@@ -843,16 +849,6 @@
   endif
 endif # $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
 
-# HACK: pretend a soong LOCAL_FULL_TEST_CONFIG is autogenerated by setting the flag in
-# module-info.json
-# TODO: (b/113029686) Add explicit flag from Soong to determine if a test was
-# autogenerated.
-ifneq (,$(filter $(SOONG_OUT_DIR)%,$(LOCAL_FULL_TEST_CONFIG)))
-  ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
-    ALL_MODULES.$(my_register_name).auto_test_config := true
-  endif
-endif
-
 
 ifeq ($(use_testcase_folder),true)
 ifneq ($(my_test_data_file_pairs),)
@@ -893,6 +889,17 @@
   $(eval my_compat_dist_test_data_$(suite) := ))
 
 endif  # LOCAL_UNINSTALLABLE_MODULE
+
+# HACK: pretend a soong LOCAL_FULL_TEST_CONFIG is autogenerated by setting the flag in
+# module-info.json
+# TODO: (b/113029686) Add explicit flag from Soong to determine if a test was
+# autogenerated.
+ifneq (,$(filter $(SOONG_OUT_DIR)%,$(LOCAL_FULL_TEST_CONFIG)))
+  ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+    ALL_MODULES.$(my_register_name).auto_test_config := true
+  endif
+endif
+
 endif  # LOCAL_COMPATIBILITY_SUITE
 
 my_supported_variant :=
@@ -942,6 +949,8 @@
     $(ALL_MODULES.$(my_register_name).CHECKED) $(my_checked_module)
 ALL_MODULES.$(my_register_name).BUILT := \
     $(ALL_MODULES.$(my_register_name).BUILT) $(LOCAL_BUILT_MODULE)
+ALL_MODULES.$(my_register_name).SOONG_MODULE_TYPE := \
+    $(ALL_MODULES.$(my_register_name).SOONG_MODULE_TYPE) $(LOCAL_SOONG_MODULE_TYPE)
 ifndef LOCAL_IS_HOST_MODULE
 ALL_MODULES.$(my_register_name).TARGET_BUILT := \
     $(ALL_MODULES.$(my_register_name).TARGET_BUILT) $(LOCAL_BUILT_MODULE)
@@ -1233,3 +1242,8 @@
 ###########################################################
 
 include $(BUILD_NOTICE_FILE)
+
+###########################################################
+## SBOM generation
+###########################################################
+include $(BUILD_SBOM_GEN)
\ No newline at end of file
diff --git a/core/binary.mk b/core/binary.mk
index 1ad9be8..579e6b5 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -19,7 +19,11 @@
 # supply that, for example, when building libc itself.
 ifdef LOCAL_IS_HOST_MODULE
   ifeq ($(LOCAL_SYSTEM_SHARED_LIBRARIES),none)
+    ifdef USE_HOST_MUSL
+      my_system_shared_libraries := libc_musl
+    else
       my_system_shared_libraries :=
+    endif
   else
       my_system_shared_libraries := $(LOCAL_SYSTEM_SHARED_LIBRARIES)
   endif
@@ -54,6 +58,9 @@
 my_cppflags := $(LOCAL_CPPFLAGS)
 my_cflags_no_override := $(GLOBAL_CLANG_CFLAGS_NO_OVERRIDE)
 my_cppflags_no_override := $(GLOBAL_CLANG_CPPFLAGS_NO_OVERRIDE)
+ifeq ($(my_32_64_bit_suffix), 64)
+  my_cflags_no_override += $(GLOBAL_CLANG_CFLAGS_64_NO_OVERRIDE)
+endif
 ifdef is_third_party
     my_cflags_no_override += $(GLOBAL_CLANG_EXTERNAL_CFLAGS_NO_OVERRIDE)
     my_cppflags_no_override += $(GLOBAL_CLANG_EXTERNAL_CFLAGS_NO_OVERRIDE)
@@ -161,7 +168,6 @@
 endif
 endif
 
-my_ndk_sysroot :=
 my_ndk_sysroot_include :=
 my_ndk_sysroot_lib :=
 my_api_level := 10000
@@ -176,11 +182,7 @@
   # Make sure we've built the NDK.
   my_additional_dependencies += $(SOONG_OUT_DIR)/ndk_base.timestamp
 
-  ifneq (,$(filter arm64 x86_64,$(my_arch)))
-    my_min_sdk_version := 21
-  else
-    my_min_sdk_version := $(MIN_SUPPORTED_SDK_VERSION)
-  endif
+  my_min_sdk_version := $(MIN_SUPPORTED_SDK_VERSION)
 
   # Historically we've just set up a bunch of symlinks in prebuilts/ndk to map
   # missing API levels to existing ones where necessary, but we're not doing
@@ -193,38 +195,19 @@
 
   my_ndk_crt_version := $(my_ndk_api)
 
-  my_ndk_hist_api := $(my_ndk_api)
-  ifeq ($(my_ndk_api),current)
-    # The last API level supported by the old prebuilt NDKs.
-    my_ndk_hist_api := 24
-  else
+  ifneq ($(my_ndk_api),current)
     my_api_level := $(my_ndk_api)
   endif
 
   my_ndk_source_root := \
       $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources
-  my_ndk_sysroot := \
-    $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/platforms/android-$(my_ndk_hist_api)/arch-$(my_arch)
   my_built_ndk := $(SOONG_OUT_DIR)/ndk
   my_ndk_triple := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_TRIPLE)
   my_ndk_sysroot_include := \
       $(my_built_ndk)/sysroot/usr/include \
       $(my_built_ndk)/sysroot/usr/include/$(my_ndk_triple) \
-      $(my_ndk_sysroot)/usr/include \
 
-  # x86_64 is a multilib toolchain, so their libraries are
-  # installed in /usr/lib64. Aarch64, on the other hand, is not a multilib
-  # compiler, so its libraries are in /usr/lib.
-  ifneq (,$(filter x86_64,$(my_arch)))
-    my_ndk_libdir_name := lib64
-  else
-    my_ndk_libdir_name := lib
-  endif
-
-  my_ndk_platform_dir := \
-      $(my_built_ndk)/platforms/android-$(my_ndk_api)/arch-$(my_arch)
-  my_built_ndk_libs := $(my_ndk_platform_dir)/usr/$(my_ndk_libdir_name)
-  my_ndk_sysroot_lib := $(my_ndk_sysroot)/usr/$(my_ndk_libdir_name)
+  my_ndk_sysroot_lib := $(my_built_ndk)/sysroot/usr/lib/$(my_ndk_triple)/$(my_ndk_api)
 
   # The bionic linker now has support for packed relocations and gnu style
   # hashes (which are much faster!), but shipping to older devices requires
@@ -348,9 +331,11 @@
 else # LOCAL_IS_HOST_MODULE
   # Add -ldl, -lpthread, -lm and -lrt to host builds to match the default behavior of
   # device builds
-  my_ldlibs += -ldl -lpthread -lm
-  ifneq ($(HOST_OS),darwin)
-    my_ldlibs += -lrt
+  ifndef USE_HOST_MUSL
+    my_ldlibs += -ldl -lpthread -lm
+    ifneq ($(HOST_OS),darwin)
+      my_ldlibs += -lrt
+    endif
   endif
 endif
 
@@ -1419,7 +1404,6 @@
 my_ndk_shared_libraries_fullpath := \
     $(foreach _lib,$(my_ndk_shared_libraries),\
         $(if $(filter $(NDK_KNOWN_LIBS),$(_lib)),\
-            $(my_built_ndk_libs)/$(_lib)$(so_suffix),\
             $(my_ndk_sysroot_lib)/$(_lib)$(so_suffix)))
 
 built_shared_libraries += \
diff --git a/core/board_config.mk b/core/board_config.mk
index 70c91a8..fae7aaa 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -188,6 +188,7 @@
   BUILD_BROKEN_PREBUILT_ELF_FILES \
   BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW \
   BUILD_BROKEN_USES_NETWORK \
+  BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES \
   BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE \
   BUILD_BROKEN_VINTF_PRODUCT_COPY_FILES \
 
@@ -918,12 +919,6 @@
 endif
 .KATI_READONLY := BOARD_USES_PVMFWIMAGE
 
-BUILDING_PVMFW_IMAGE :=
-ifeq ($(PRODUCT_BUILD_PVMFW_IMAGE),true)
-  BUILDING_PVMFW_IMAGE := true
-endif
-.KATI_READONLY := BUILDING_PVMFW_IMAGE
-
 ###########################################
 # Ensure consistency among TARGET_RECOVERY_UPDATER_LIBS, AB_OTA_UPDATER, and PRODUCT_OTA_FORCE_NON_AB_PACKAGE.
 TARGET_RECOVERY_UPDATER_LIBS ?=
@@ -995,19 +990,13 @@
 endif
 
 ###########################################
-# APEXes are by default flattened, i.e. non-updatable, if not building unbundled
-# apps. It can be unflattened (and updatable) by inheriting from
-# updatable_apex.mk
+# APEXes are by default not flattened, i.e. updatable.
 #
 # APEX flattening can also be forcibly enabled (resp. disabled) by
 # setting OVERRIDE_TARGET_FLATTEN_APEX to true (resp. false), e.g. by
 # setting the OVERRIDE_TARGET_FLATTEN_APEX environment variable.
 ifdef OVERRIDE_TARGET_FLATTEN_APEX
   TARGET_FLATTEN_APEX := $(OVERRIDE_TARGET_FLATTEN_APEX)
-else
-  ifeq (,$(TARGET_BUILD_APPS)$(TARGET_FLATTEN_APEX))
-    TARGET_FLATTEN_APEX := true
-  endif
 endif
 
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
diff --git a/core/board_config_wifi.mk b/core/board_config_wifi.mk
index ddeb0d7..8289bf2 100644
--- a/core/board_config_wifi.mk
+++ b/core/board_config_wifi.mk
@@ -74,4 +74,10 @@
 endif
 ifdef WIFI_AVOID_IFACE_RESET_MAC_CHANGE
     $(call soong_config_set,wifi,avoid_iface_reset_mac_change,true)
+endif
+ifdef WIFI_SKIP_STATE_TOGGLE_OFF_ON_FOR_NAN
+    $(call soong_config_set,wifi,wifi_skip_state_toggle_off_on_for_nan,true)
+endif
+ifeq ($(strip $(TARGET_USES_AOSP_FOR_WLAN)),true)
+    $(call soong_config_set,wifi,target_uses_aosp_for_wlan,true)
 endif
\ No newline at end of file
diff --git a/core/clang/OWNERS b/core/clang/OWNERS
deleted file mode 100644
index d41d3fc..0000000
--- a/core/clang/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-chh@google.com
-pirama@google.com
-srhines@google.com
-yikong@google.com
diff --git a/core/clang/config.mk b/core/clang/config.mk
index 28a75ec..d03c541 100644
--- a/core/clang/config.mk
+++ b/core/clang/config.mk
@@ -2,7 +2,7 @@
 
 LLVM_READOBJ := $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-readobj
 
-LLVM_RTLIB_PATH := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib64/clang/$(LLVM_RELEASE_VERSION)/lib/linux/
+LLVM_RTLIB_PATH := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib/clang/$(LLVM_RELEASE_VERSION)/lib/linux/
 
 define convert-to-clang-flags
 $(strip $(filter-out $(CLANG_CONFIG_UNKNOWN_CFLAGS),$(1)))
diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk
index 5576785..f41f1b7 100644
--- a/core/cleanbuild.mk
+++ b/core/cleanbuild.mk
@@ -33,8 +33,6 @@
 
 # CTS-specific config.
 -include cts/build/config.mk
-# VTS-specific config.
--include test/vts/tools/vts-tradefed/build/config.mk
 # device-tests-specific-config.
 -include tools/tradefederation/build/suites/device-tests/config.mk
 # general-tests-specific-config.
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index e325760..bb7ba1b 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -293,6 +293,7 @@
 LOCAL_SOONG_LICENSE_METADATA :=
 LOCAL_SOONG_LINK_TYPE :=
 LOCAL_SOONG_LINT_REPORTS :=
+LOCAL_SOONG_MODULE_TYPE :=
 LOCAL_SOONG_PROGUARD_DICT :=
 LOCAL_SOONG_PROGUARD_USAGE_ZIP :=
 LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
@@ -502,6 +503,7 @@
 
 # Robolectric variables
 LOCAL_INSTRUMENT_SOURCE_DIRS :=
+LOCAL_INSTRUMENT_SRCJARS :=
 LOCAL_ROBOTEST_FAILURE_FATAL :=
 LOCAL_ROBOTEST_FILES :=
 LOCAL_ROBOTEST_TIMEOUT :=
diff --git a/core/combo/arch/arm64/armv9-a.mk b/core/combo/arch/arm64/armv9-a.mk
new file mode 100644
index 0000000..de0760a
--- /dev/null
+++ b/core/combo/arch/arm64/armv9-a.mk
@@ -0,0 +1,19 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# .mk file required to support builds for the new armv9-a Arm64 arch
+# variant. The file just needs to be present; it is not required to contain
+# anything.
diff --git a/core/combo/arch/x86/goldmont-plus.mk b/core/combo/arch/x86/goldmont-plus.mk
new file mode 100644
index 0000000..4ce2053
--- /dev/null
+++ b/core/combo/arch/x86/goldmont-plus.mk
@@ -0,0 +1,7 @@
+# This file contains feature macro definitions specific to the
+# goldmont-plus arch variant.
+#
+# See build/make/core/combo/arch/x86/x86-atom.mk for differences.
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/combo/arch/x86/goldmont.mk b/core/combo/arch/x86/goldmont.mk
new file mode 100644
index 0000000..b5a6ff2
--- /dev/null
+++ b/core/combo/arch/x86/goldmont.mk
@@ -0,0 +1,7 @@
+# This file contains feature macro definitions specific to the
+# goldmont arch variant.
+#
+# See build/make/core/combo/arch/x86/x86-atom.mk for differences.
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/combo/arch/x86/tremont.mk b/core/combo/arch/x86/tremont.mk
new file mode 100644
index 0000000..b80d228
--- /dev/null
+++ b/core/combo/arch/x86/tremont.mk
@@ -0,0 +1,7 @@
+# This file contains feature macro definitions specific to the
+# tremont arch variant.
+#
+# See build/make/core/combo/arch/x86/x86-atom.mk for differences.
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/combo/arch/x86_64/goldmont-plus.mk b/core/combo/arch/x86_64/goldmont-plus.mk
new file mode 100644
index 0000000..4ce2053
--- /dev/null
+++ b/core/combo/arch/x86_64/goldmont-plus.mk
@@ -0,0 +1,7 @@
+# This file contains feature macro definitions specific to the
+# goldmont-plus arch variant.
+#
+# See build/make/core/combo/arch/x86/x86-atom.mk for differences.
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/combo/arch/x86_64/goldmont.mk b/core/combo/arch/x86_64/goldmont.mk
new file mode 100644
index 0000000..b5a6ff2
--- /dev/null
+++ b/core/combo/arch/x86_64/goldmont.mk
@@ -0,0 +1,7 @@
+# This file contains feature macro definitions specific to the
+# goldmont arch variant.
+#
+# See build/make/core/combo/arch/x86/x86-atom.mk for differences.
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/combo/arch/x86_64/tremont.mk b/core/combo/arch/x86_64/tremont.mk
new file mode 100644
index 0000000..b80d228
--- /dev/null
+++ b/core/combo/arch/x86_64/tremont.mk
@@ -0,0 +1,7 @@
+# This file contains feature macro definitions specific to the
+# tremont arch variant.
+#
+# See build/make/core/combo/arch/x86/x86-atom.mk for differences.
+#
+
+ARCH_X86_HAVE_SSE4_1 := true
diff --git a/core/config.mk b/core/config.mk
index e8b984d..26e90ef 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -166,6 +166,9 @@
 $(KATI_obsolete_var PRODUCT_SUPPORTS_BOOT_SIGNER,VB 1.0 and related variables are no longer supported)
 $(KATI_obsolete_var PRODUCT_VERITY_SIGNING_KEY,VB 1.0 and related variables are no longer supported)
 $(KATI_obsolete_var BOARD_PREBUILT_PVMFWIMAGE,pvmfw.bin is now built in AOSP and custom versions are no longer supported)
+$(KATI_obsolete_var BUILDING_PVMFW_IMAGE,BUILDING_PVMFW_IMAGE is no longer used)
+$(KATI_obsolete_var BOARD_BUILD_SYSTEM_ROOT_IMAGE)
+
 # Used to force goals to build.  Only use for conditionally defined goals.
 .PHONY: FORCE
 FORCE:
@@ -230,6 +233,7 @@
 BUILD_FUZZ_TEST :=$= $(BUILD_SYSTEM)/fuzz_test.mk
 
 BUILD_NOTICE_FILE :=$= $(BUILD_SYSTEM)/notice_files.mk
+BUILD_SBOM_GEN :=$= $(BUILD_SYSTEM)/sbom.mk
 
 include $(BUILD_SYSTEM)/deprecation.mk
 
@@ -429,6 +433,9 @@
 $(hide) $(HOST_NM) -gP $(1) | cut -f1-2 -d" " | (grep -v U$$ >> $(2) || true)
 endef
 
+# Pick a Java compiler.
+include $(BUILD_SYSTEM)/combo/javac.mk
+
 ifeq ($(CALLED_FROM_SETUP),true)
 include $(BUILD_SYSTEM)/ccache.mk
 include $(BUILD_SYSTEM)/goma.mk
@@ -451,9 +458,6 @@
   WITH_TIDY_ONLY :=
 endif
 
-# Pick a Java compiler.
-include $(BUILD_SYSTEM)/combo/javac.mk
-
 # ---------------------------------------------------------------
 # Check that the configuration is current.  We check that
 # BUILD_ENV_SEQUENCE_NUMBER is current against this value.
@@ -579,7 +583,6 @@
 endif
 PROTOC := $(HOST_OUT_EXECUTABLES)/aprotoc$(HOST_EXECUTABLE_SUFFIX)
 NANOPB_SRCS := $(HOST_OUT_EXECUTABLES)/protoc-gen-nanopb
-VTSC := $(HOST_OUT_EXECUTABLES)/vtsc$(HOST_EXECUTABLE_SUFFIX)
 MKBOOTFS := $(HOST_OUT_EXECUTABLES)/mkbootfs$(HOST_EXECUTABLE_SUFFIX)
 MINIGZIP := $(HOST_OUT_EXECUTABLES)/minigzip$(HOST_EXECUTABLE_SUFFIX)
 LZ4 := $(HOST_OUT_EXECUTABLES)/lz4$(HOST_EXECUTABLE_SUFFIX)
@@ -612,11 +615,15 @@
 JARJAR := $(HOST_OUT_JAVA_LIBRARIES)/jarjar.jar
 DATA_BINDING_COMPILER := $(HOST_OUT_JAVA_LIBRARIES)/databinding-compiler.jar
 FAT16COPY := build/make/tools/fat16copy.py
-CHECK_ELF_FILE := build/make/tools/check_elf_file.py
+CHECK_ELF_FILE := $(HOST_OUT_EXECUTABLES)/check_elf_file$(HOST_EXECUTABLE_SUFFIX)
 LPMAKE := $(HOST_OUT_EXECUTABLES)/lpmake$(HOST_EXECUTABLE_SUFFIX)
 ADD_IMG_TO_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/add_img_to_target_files$(HOST_EXECUTABLE_SUFFIX)
 BUILD_IMAGE := $(HOST_OUT_EXECUTABLES)/build_image$(HOST_EXECUTABLE_SUFFIX)
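+# Boards can substitute their own super image builder by setting BOARD_CUSTOM_BUILD_SUPER_IMAGE.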
+ifeq (,$(strip $(BOARD_CUSTOM_BUILD_SUPER_IMAGE)))
 BUILD_SUPER_IMAGE := $(HOST_OUT_EXECUTABLES)/build_super_image$(HOST_EXECUTABLE_SUFFIX)
+else
+BUILD_SUPER_IMAGE := $(BOARD_CUSTOM_BUILD_SUPER_IMAGE)
+endif
 IMG_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/img_from_target_files$(HOST_EXECUTABLE_SUFFIX)
 MAKE_RECOVERY_PATCH := $(HOST_OUT_EXECUTABLES)/make_recovery_patch$(HOST_EXECUTABLE_SUFFIX)
 OTA_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/ota_from_target_files$(HOST_EXECUTABLE_SUFFIX)
@@ -639,6 +646,8 @@
 DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
 
+GEN_SBOM := $(HOST_OUT_EXECUTABLES)/generate-sbom
+
 FINDBUGS_DIR := external/owasp/sanitizer/tools/findbugs/bin
 FINDBUGS := $(FINDBUGS_DIR)/findbugs
 
@@ -693,6 +702,14 @@
 PRODUCT_FULL_TREBLE_OVERRIDE ?=
 $(foreach req,$(requirements),$(eval $(req)_OVERRIDE ?=))
 
+ifneq ($(PRODUCT_SEPOLICY_SPLIT),true)
+# WARNING: DO NOT CHANGE: if you are downstream of AOSP and you change this without
+# letting upstream know it's important to you, we may do cleanup that breaks this
+# significantly. Please let us know if you are changing this.
+# TODO(b/257176017) - unsplit sepolicy is no longer supported
+PRODUCT_SEPOLICY_SPLIT := true
+endif
+
 # TODO(b/114488870): disallow PRODUCT_FULL_TREBLE_OVERRIDE from being used.
 .KATI_READONLY := \
     PRODUCT_FULL_TREBLE_OVERRIDE \
@@ -714,8 +731,13 @@
 endif
 
 # Starting in Android U, non-VNDK devices not supported
+# WARNING: DO NOT CHANGE: if you are downstream of AOSP and you change this without
+# letting upstream know it's important to you, we may do cleanup that breaks this
+# significantly. Please let us know if you are changing this.
 ifndef BOARD_VNDK_VERSION
+# READ WARNING - DO NOT CHANGE
 BOARD_VNDK_VERSION := current
+# READ WARNING - DO NOT CHANGE
 endif
 
 ifdef PRODUCT_PRODUCT_VNDK_VERSION
@@ -841,7 +863,6 @@
 
 # A list of SEPolicy versions, besides PLATFORM_SEPOLICY_VERSION, that the framework supports.
 PLATFORM_SEPOLICY_COMPAT_VERSIONS := \
-    28.0 \
     29.0 \
     30.0 \
     31.0 \
@@ -866,11 +887,6 @@
   endif
 endif
 
-# TODO(b/241346584): Mark BOARD_BUILD_SYSTEM_ROOT_IMAGE as KATI_obsolete_var after all users are removed
-ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
-    $(error BOARD_BUILD_SYSTEM_ROOT_IMAGE is deprecated)
-endif
-
 ifeq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
     ifneq ($(PRODUCT_USE_DYNAMIC_PARTITION_SIZE),true)
         $(error PRODUCT_USE_DYNAMIC_PARTITION_SIZE must be true for devices with dynamic partitions)
@@ -1052,14 +1068,6 @@
 BOARD_PREBUILT_HIDDENAPI_DIR ?=
 .KATI_READONLY := BOARD_PREBUILT_HIDDENAPI_DIR
 
-ifdef USE_HOST_MUSL
-  ifneq (,$(or $(BUILD_BROKEN_USES_BUILD_HOST_EXECUTABLE),\
-               $(BUILD_BROKEN_USES_BUILD_HOST_SHARED_LIBRARY),\
-               $(BUILD_BROKEN_USES_BUILD_HOST_STATIC_LIBRARY)))
-    $(error USE_HOST_MUSL can't be set when native host builds are enabled in Make with BUILD_BROKEN_USES_BUILD_HOST_*)
-  endif
-endif
-
 # ###############################################################
 # Set up final options.
 # ###############################################################
diff --git a/core/definitions.mk b/core/definitions.mk
index afa7f7b..ce1248e 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -595,12 +595,10 @@
 ## license metadata.
 ###########################################################
 define declare-copy-target-license-metadata
-$(strip $(if $(filter $(OUT_DIR)%,$(2)),$(eval _dir:=$(call license-metadata-dir,$(1)))\
+$(strip $(if $(filter $(OUT_DIR)%,$(2)),\
   $(eval _tgt:=$(strip $(1)))\
-  $(eval _meta := $(call append-path,$(_dir),$(patsubst $(OUT_DIR)%,out%,$(_tgt).meta_lic)))\
   $(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2)))\
-  $(eval ALL_COPIED_TARGETS += $(_tgt)),\
-  $(eval ALL_TARGETS.$(1).META_LIC:=$(module_license_metadata))))
+  $(eval ALL_COPIED_TARGETS += $(_tgt))))
 endef
 
 ###########################################################
@@ -746,11 +744,10 @@
 $(strip $(eval _dep:=))
 $(strip $(foreach s,$(ALL_COPIED_TARGETS.$(1).SOURCES),\
   $(eval _dmeta:=$(ALL_TARGETS.$(s).META_LIC))\
-  $(if $(filter 0p,$(_dmeta)),\
-    $(if $(filter-out 0p,$(_dep)),,$(eval ALL_TARGETS.$(1).META_LIC:=0p)),\
-    $(if $(_dep),\
-      $(if $(filter-out $(_dep),$(_dmeta)),$(error cannot copy target from multiple modules: $(1) from $(_dep) and $(_dmeta))),
-      $(eval _dep:=$(_dmeta))))))
+  $(if $(filter-out 0p,$(_dep)),\
+      $(if $(filter-out $(_dep),$(_dmeta)),$(error cannot copy target from multiple modules: $(1) from $(_dep) and $(_dmeta))),\
+      $(eval _dep:=$(_dmeta)))))
+$(if $(filter 0p,$(_dep)),$(eval ALL_TARGETS.$(1).META_LIC:=0p))
 $(strip $(if $(strip $(_dep)),,$(error cannot copy target from unknown module: $(1) from $(ALL_COPIED_TARGETS.$(1).SOURCES))))
 
 ifneq (0p,$(ALL_TARGETS.$(1).META_LIC))
@@ -772,6 +769,11 @@
 	  -o $$@
 
 endif
+
+$(eval _dep:=)
+$(eval _dmeta:=)
+$(eval _meta:=)
+$(eval _dir:=)
 endef
 
 ###########################################################
@@ -2121,6 +2123,7 @@
      $(PRIVATE_HOST_GLOBAL_LDFLAGS) \
   ) \
   $(PRIVATE_LDFLAGS) \
+  $(PRIVATE_CRTBEGIN) \
   $(PRIVATE_ALL_OBJECTS) \
   -Wl,--whole-archive \
   $(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES) \
@@ -2129,8 +2132,10 @@
   $(PRIVATE_ALL_STATIC_LIBRARIES) \
   $(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
   $(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_HOST_LIBPROFILE_RT)) \
+  $(PRIVATE_LIBCRT_BUILTINS) \
   $(PRIVATE_ALL_SHARED_LIBRARIES) \
   -o $@ \
+  $(PRIVATE_CRTEND) \
   $(PRIVATE_LDLIBS)
 endef
 endif
@@ -2264,6 +2269,7 @@
 ifneq ($(HOST_CUSTOM_LD_COMMAND),true)
 define transform-host-o-to-executable-inner
 $(hide) $(PRIVATE_CXX_LINK) \
+  $(PRIVATE_CRTBEGIN) \
   $(PRIVATE_ALL_OBJECTS) \
   -Wl,--whole-archive \
   $(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES) \
@@ -2272,6 +2278,7 @@
   $(PRIVATE_ALL_STATIC_LIBRARIES) \
   $(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
   $(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_HOST_LIBPROFILE_RT)) \
+  $(PRIVATE_LIBCRT_BUILTINS) \
   $(PRIVATE_ALL_SHARED_LIBRARIES) \
   $(foreach path,$(PRIVATE_RPATHS), \
     -Wl,-rpath,\$$ORIGIN/$(path)) \
@@ -2280,6 +2287,7 @@
   ) \
   $(PRIVATE_LDFLAGS) \
   -o $@ \
+  $(PRIVATE_CRTEND) \
   $(PRIVATE_LDLIBS)
 endef
 endif
@@ -2538,7 +2546,87 @@
         @$(call emit-line,$(wordlist 58001,58500,$(1)),$(2))
         @$(call emit-line,$(wordlist 58501,59000,$(1)),$(2))
         @$(call emit-line,$(wordlist 59001,59500,$(1)),$(2))
-        @$(if $(wordlist 59501,59502,$(1)),$(error Too many words ($(words $(1)))))
+        @$(call emit-line,$(wordlist 59501,60000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 60001,60500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 60501,61000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 61001,61500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 61501,62000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 62001,62500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 62501,63000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 63001,63500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 63501,64000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 64001,64500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 64501,65000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 65001,65500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 65501,66000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 66001,66500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 66501,67000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 67001,67500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 67501,68000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 68001,68500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 68501,69000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 69001,69500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 69501,70000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 70001,70500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 70501,71000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 71001,71500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 71501,72000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 72001,72500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 72501,73000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 73001,73500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 73501,74000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 74001,74500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 74501,75000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 75001,75500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 75501,76000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 76001,76500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 76501,77000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 77001,77500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 77501,78000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 78001,78500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 78501,79000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 79001,79500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 79501,80000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 80001,80500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 80501,81000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 81001,81500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 81501,82000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 82001,82500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 82501,83000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 83001,83500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 83501,84000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 84001,84500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 84501,85000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 85001,85500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 85501,86000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 86001,86500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 86501,87000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 87001,87500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 87501,88000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 88001,88500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 88501,89000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 89001,89500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 89501,90000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 90001,90500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 90501,91000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 91001,91500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 91501,92000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 92001,92500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 92501,93000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 93001,93500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 93501,94000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 94001,94500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 94501,95000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 95001,95500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 95501,96000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 96001,96500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 96501,97000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 97001,97500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 97501,98000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 98001,98500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 98501,99000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 99001,99500,$(1)),$(2))
+        @$(if $(wordlist 99501,99502,$(1)),$(error dump-words-to-file: Too many words ($(words $(1)))))
 endef
 # Return jar arguments to compress files in a given directory
 # $(1): directory
diff --git a/core/device.mk b/core/device.mk
deleted file mode 100644
index 20ff447..0000000
--- a/core/device.mk
+++ /dev/null
@@ -1,76 +0,0 @@
-#
-# Copyright (C) 2007 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-_device_var_list := \
-    DEVICE_NAME \
-    DEVICE_BOARD \
-    DEVICE_REGION
-
-define dump-device
-$(info ==== $(1) ====)\
-$(foreach v,$(_device_var_list),\
-$(info DEVICES.$(1).$(v) := $(DEVICES.$(1).$(v))))\
-$(info --------)
-endef
-
-define dump-devices
-$(foreach p,$(DEVICES),$(call dump-device,$(p)))
-endef
-
-#
-# $(1): device to inherit
-#
-define inherit-device
-  $(foreach v,$(_device_var_list), \
-      $(eval $(v) := $($(v)) $(INHERIT_TAG)$(strip $(1))))
-endef
-
-#
-# $(1): device makefile list
-#
-#TODO: check to make sure that devices have all the necessary vars defined
-define import-devices
-$(call import-nodes,DEVICES,$(1),$(_device_var_list))
-endef
-
-
-#
-# $(1): short device name like "sooner"
-#
-define _resolve-short-device-name
-  $(eval dn := $(strip $(1)))
-  $(eval d := \
-      $(foreach d,$(DEVICES), \
-          $(if $(filter $(dn),$(DEVICES.$(d).DEVICE_NAME)), \
-            $(d) \
-       )) \
-   )
-  $(eval d := $(sort $(d)))
-  $(if $(filter 1,$(words $(d))), \
-    $(d), \
-    $(if $(filter 0,$(words $(d))), \
-      $(error No matches for device "$(dn)"), \
-      $(error Device "$(dn)" ambiguous: matches $(d)) \
-    ) \
-  )
-endef
-
-#
-# $(1): short device name like "sooner"
-#
-define resolve-short-device-name
-$(strip $(call _resolve-short-device-name,$(1)))
-endef
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 593ad66..62c3ba3 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -62,10 +62,24 @@
 
 boot_zip := $(PRODUCT_OUT)/boot.zip
 bootclasspath_jars := $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
+
+# TODO: remove system_server_jars usages from boot.zip and depend directly on the system_server.zip file.
+
+# Use "/system" path for JARs with "platform:" prefix.
+# These JARs counterintuitively use "platform" prefix but they will
+# be actually installed to /system partition.
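+# For example (illustrative): "platform:services" maps to $(PRODUCT_OUT)/system/framework/services.jar.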
+platform_system_server_jars = $(filter platform:%, $(PRODUCT_SYSTEM_SERVER_JARS))
 system_server_jars := \
-  $(foreach m,$(PRODUCT_SYSTEM_SERVER_JARS),\
+  $(foreach m,$(platform_system_server_jars),\
     $(PRODUCT_OUT)/system/framework/$(call word-colon,2,$(m)).jar)
 
+# For the remaining system server JARs, use the partition signified by the prefix.
+# For example, the prefix "system_ext:" will use the "/system_ext" path.
+other_system_server_jars = $(filter-out $(platform_system_server_jars), $(PRODUCT_SYSTEM_SERVER_JARS))
+system_server_jars += \
+  $(foreach m,$(other_system_server_jars),\
+    $(PRODUCT_OUT)/$(call word-colon,1,$(m))/framework/$(call word-colon,2,$(m)).jar)
+
 $(boot_zip): PRIVATE_BOOTCLASSPATH_JARS := $(bootclasspath_jars)
 $(boot_zip): PRIVATE_SYSTEM_SERVER_JARS := $(system_server_jars)
 $(boot_zip): $(bootclasspath_jars) $(system_server_jars) $(SOONG_ZIP) $(MERGE_ZIPS) $(DEXPREOPT_IMAGE_ZIP_boot) $(DEXPREOPT_IMAGE_ZIP_art)
@@ -79,5 +93,36 @@
 
 $(call dist-for-goals, droidcore, $(boot_zip))
 
+ifneq (,$(filter true,$(ART_MODULE_BUILD_FROM_SOURCE) $(MODULE_BUILD_FROM_SOURCE)))
+# Build system_server.zip, which contains the APEX system server jars and the standalone system server jars.
+system_server_zip := $(PRODUCT_OUT)/system_server.zip
+apex_system_server_jars := \
+  $(foreach m,$(PRODUCT_APEX_SYSTEM_SERVER_JARS),\
+    $(PRODUCT_OUT)/apex/$(call word-colon,1,$(m))/javalib/$(call word-colon,2,$(m)).jar)
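+# e.g. (illustrative) "com.android.art:service-art" maps to $(PRODUCT_OUT)/apex/com.android.art/javalib/service-art.jar.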
+
+apex_standalone_system_server_jars := \
+  $(foreach m,$(PRODUCT_APEX_STANDALONE_SYSTEM_SERVER_JARS),\
+    $(PRODUCT_OUT)/apex/$(call word-colon,1,$(m))/javalib/$(call word-colon,2,$(m)).jar)
+
+standalone_system_server_jars := \
+  $(foreach m,$(PRODUCT_STANDALONE_SYSTEM_SERVER_JARS),\
+    $(PRODUCT_OUT)/apex/$(call word-colon,1,$(m))/javalib/$(call word-colon,2,$(m)).jar)
+
+$(system_server_zip): PRIVATE_SYSTEM_SERVER_JARS := $(system_server_jars)
+$(system_server_zip): PRIVATE_APEX_SYSTEM_SERVER_JARS := $(apex_system_server_jars)
+$(system_server_zip): PRIVATE_APEX_STANDALONE_SYSTEM_SERVER_JARS := $(apex_standalone_system_server_jars)
+$(system_server_zip): PRIVATE_STANDALONE_SYSTEM_SERVER_JARS := $(standalone_system_server_jars)
+$(system_server_zip): $(system_server_jars) $(apex_system_server_jars) $(apex_standalone_system_server_jars) $(standalone_system_server_jars) $(SOONG_ZIP)
+	@echo "Create system server package: $@"
+	rm -f $@
+	$(SOONG_ZIP) -o $@ \
+	  -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_SYSTEM_SERVER_JARS)) \
+	  -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_APEX_SYSTEM_SERVER_JARS)) \
+	  -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_APEX_STANDALONE_SYSTEM_SERVER_JARS)) \
+	  -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_STANDALONE_SYSTEM_SERVER_JARS))
+
+$(call dist-for-goals, droidcore, $(system_server_zip))
+
+endif  #ART_MODULE_BUILD_FROM_SOURCE || MODULE_BUILD_FROM_SOURCE
 endif  #PRODUCT_USES_DEFAULT_ART_CONFIG
 endif  #WITH_DEXPREOPT
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index c11b7f4..e36e2eb 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -130,6 +130,7 @@
   $(call add_json_str,  Dex2oatXmx,                              $(DEX2OAT_XMX))
   $(call add_json_str,  Dex2oatXms,                              $(DEX2OAT_XMS))
   $(call add_json_str,  EmptyDirectory,                          $(OUT_DIR)/empty)
+  $(call add_json_bool, EnableUffdGc,                            $(filter true,$(ENABLE_UFFD_GC)))
 
 ifdef TARGET_ARCH
   $(call add_json_map,  CpuVariant)
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index b303b52..d498875 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -272,12 +272,13 @@
 my_dexpreopt_images_deps :=
 my_dexpreopt_image_locations_on_host :=
 my_dexpreopt_image_locations_on_device :=
-# Infix can be 'boot' or 'art'. Soong creates a set of variables for Make, one
-# for each boot image (primary and the framework extension). The only reason why
-# the primary image is exposed to Make is testing (art gtests) and benchmarking
-# (art golem benchmarks). Install rules that use those variables are in
-# dex_preopt_libart.mk. Here for dexpreopt purposes the infix is always 'boot'.
-my_dexpreopt_infix := boot
+# Infix can be 'art', 'boot', or 'mainline'. Soong creates a set of variables
+# for Make, one for each boot image (primary, the framework extension, and the
+# mainline extension). The only reason why the primary image is exposed to Make
+# is testing (art gtests) and benchmarking (art golem benchmarks). Install rules
+# that use those variables are in dex_preopt_libart.mk. Here for dexpreopt
+# purposes the infix is always 'boot' or 'mainline'.
+my_dexpreopt_infix := $(if $(filter true,$(DEX_PREOPT_WITH_UPDATABLE_BCP)),mainline,boot)
 my_create_dexpreopt_config :=
 
 ifdef LOCAL_DEX_PREOPT
@@ -447,6 +448,7 @@
 
   my_dexpreopt_script := $(intermediates)/dexpreopt.sh
   my_dexpreopt_zip := $(intermediates)/dexpreopt.zip
+  DEXPREOPT.$(LOCAL_MODULE).POST_INSTALLED_DEXPREOPT_ZIP := $(my_dexpreopt_zip)
   .KATI_RESTAT: $(my_dexpreopt_script)
   $(my_dexpreopt_script): PRIVATE_MODULE := $(LOCAL_MODULE)
   $(my_dexpreopt_script): PRIVATE_GLOBAL_SOONG_CONFIG := $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)
@@ -506,4 +508,4 @@
   my_dexpreopt_zip :=
   my_dexpreopt_config_for_postprocessing :=
 endif # LOCAL_DEX_PREOPT
-endif # my_create_dexpreopt_config
\ No newline at end of file
+endif # my_create_dexpreopt_config
diff --git a/core/dumpvar.mk b/core/dumpvar.mk
index 6f3d14f..4f313bf 100644
--- a/core/dumpvar.mk
+++ b/core/dumpvar.mk
@@ -3,18 +3,6 @@
 # what to add to the path given the config we have chosen.
 ifeq ($(CALLED_FROM_SETUP),true)
 
-ifneq ($(filter /%,$(SOONG_HOST_OUT_EXECUTABLES)),)
-ABP := $(SOONG_HOST_OUT_EXECUTABLES)
-else
-ABP := $(PWD)/$(SOONG_HOST_OUT_EXECUTABLES)
-endif
-ifneq ($(filter /%,$(HOST_OUT_EXECUTABLES)),)
-ABP := $(ABP):$(HOST_OUT_EXECUTABLES)
-else
-ABP := $(ABP):$(PWD)/$(HOST_OUT_EXECUTABLES)
-endif
-
-ANDROID_BUILD_PATHS := $(ABP)
 ANDROID_PREBUILTS := prebuilt/$(HOST_PREBUILT_TAG)
 ANDROID_GCC_PREBUILTS := prebuilts/gcc/$(HOST_PREBUILT_TAG)
 ANDROID_CLANG_PREBUILTS := prebuilts/clang/host/$(HOST_PREBUILT_TAG)
diff --git a/core/envsetup.mk b/core/envsetup.mk
index fc4afd9..7dd9b12 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -135,15 +135,17 @@
   HOST_OS := darwin
 endif
 
-HOST_OS_EXTRA := $(shell uname -rsm)
-ifeq ($(HOST_OS),linux)
-  ifneq ($(wildcard /etc/os-release),)
-    HOST_OS_EXTRA += $(shell source /etc/os-release; echo $$PRETTY_NAME)
+ifeq ($(CALLED_FROM_SETUP),true)
+  HOST_OS_EXTRA := $(shell uname -rsm)
+  ifeq ($(HOST_OS),linux)
+    ifneq ($(wildcard /etc/os-release),)
+      HOST_OS_EXTRA += $(shell source /etc/os-release; echo $$PRETTY_NAME)
+    endif
+  else ifeq ($(HOST_OS),darwin)
+    HOST_OS_EXTRA += $(shell sw_vers -productVersion)
   endif
-else ifeq ($(HOST_OS),darwin)
-  HOST_OS_EXTRA += $(shell sw_vers -productVersion)
+  HOST_OS_EXTRA := $(subst $(space),-,$(HOST_OS_EXTRA))
 endif
-HOST_OS_EXTRA := $(subst $(space),-,$(HOST_OS_EXTRA))
 
 # BUILD_OS is the real host doing the build.
 BUILD_OS := $(HOST_OS)
diff --git a/core/host_executable_internal.mk b/core/host_executable_internal.mk
index 0cf62a4..2ff9ff2 100644
--- a/core/host_executable_internal.mk
+++ b/core/host_executable_internal.mk
@@ -39,6 +39,21 @@
 endif
 my_libdir :=
 
+my_crtbegin :=
+my_crtend :=
+my_libcrt_builtins :=
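+# With a musl host toolchain, the musl crtbegin/crtend objects and the compiler
+# builtins library are passed to the link command explicitly.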
+ifdef USE_HOST_MUSL
+  my_crtbegin := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_OBJECT_libc_musl_crtbegin_dynamic)
+  my_crtend := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_OBJECT_libc_musl_crtend)
+  my_libcrt_builtins := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBCRT_BUILTINS)
+  $(LOCAL_BUILT_MODULE): PRIVATE_LDFLAGS += -Wl,--no-dynamic-linker
+endif
+
+$(LOCAL_BUILT_MODULE): PRIVATE_CRTBEGIN := $(my_crtbegin)
+$(LOCAL_BUILT_MODULE): PRIVATE_CRTEND := $(my_crtend)
+$(LOCAL_BUILT_MODULE): PRIVATE_LIBCRT_BUILTINS := $(my_libcrt_builtins)
+$(LOCAL_BUILT_MODULE): $(my_crtbegin) $(my_crtend) $(my_libcrt_builtins)
+
 $(LOCAL_BUILT_MODULE): $(all_objects) $(all_libraries) $(CLANG_CXX)
 	$(transform-host-o-to-executable)
 
diff --git a/core/host_shared_library_internal.mk b/core/host_shared_library_internal.mk
index da20874..ae8b798 100644
--- a/core/host_shared_library_internal.mk
+++ b/core/host_shared_library_internal.mk
@@ -36,6 +36,17 @@
 my_host_libprofile_rt := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBPROFILE_RT)
 $(LOCAL_BUILT_MODULE): PRIVATE_HOST_LIBPROFILE_RT := $(my_host_libprofile_rt)
 
+ifdef USE_HOST_MUSL
+  my_crtbegin := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_OBJECT_libc_musl_crtbegin_so)
+  my_crtend := $(SOONG_$(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_OBJECT_libc_musl_crtend_so)
+  my_libcrt_builtins := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBCRT_BUILTINS)
+endif
+
+$(LOCAL_BUILT_MODULE): PRIVATE_CRTBEGIN := $(my_crtbegin)
+$(LOCAL_BUILT_MODULE): PRIVATE_CRTEND := $(my_crtend)
+$(LOCAL_BUILT_MODULE): PRIVATE_LIBCRT_BUILTINS := $(my_libcrt_builtins)
+$(LOCAL_BUILT_MODULE): $(my_crtbegin) $(my_crtend) $(my_libcrt_builtins)
+
 $(LOCAL_BUILT_MODULE): \
         $(all_objects) \
         $(all_libraries) \
diff --git a/core/java_common.mk b/core/java_common.mk
index 5981b60..0e03d0b 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -296,16 +296,16 @@
       # Note: the lib naming scheme must be kept in sync with build/soong/java/sdk_library.go.
       sdk_lib_suffix = $(call pretty-error,sdk_lib_suffix was not set correctly)
       ifeq (current,$(LOCAL_SDK_VERSION))
-        sdk_module := android_stubs_current
+        sdk_module := $(ANDROID_PUBLIC_STUBS)
         sdk_lib_suffix := .stubs
       else ifeq (system_current,$(LOCAL_SDK_VERSION))
-        sdk_module := android_system_stubs_current
+        sdk_module := $(ANDROID_SYSTEM_STUBS)
         sdk_lib_suffix := .stubs.system
       else ifeq (test_current,$(LOCAL_SDK_VERSION))
-        sdk_module := android_test_stubs_current
+        sdk_module := $(ANDROID_TEST_STUBS)
         sdk_lib_suffix := .stubs.test
       else ifeq (core_current,$(LOCAL_SDK_VERSION))
-        sdk_module := core.current.stubs
+        sdk_module := $(ANDROID_CORE_STUBS)
         sdk_lib_suffix = $(call pretty-error,LOCAL_SDK_LIBRARIES not supported for LOCAL_SDK_VERSION = core_current)
       endif
       sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(lib_name)$(sdk_lib_suffix))
diff --git a/core/java_host_unit_test_config_template.xml b/core/java_host_unit_test_config_template.xml
index d8795f9..5d8b254 100644
--- a/core/java_host_unit_test_config_template.xml
+++ b/core/java_host_unit_test_config_template.xml
@@ -23,5 +23,14 @@
 
     <test class="com.android.tradefed.testtype.IsolatedHostTest" >
         <option name="jar" value="{MODULE}.jar" />
+        <option name="java-flags" value="--add-modules=jdk.compiler"/>
+        <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.api=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.code=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.comp=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.file=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.main=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.parser=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.tree=ALL-UNNAMED"/>
+        <option name="java-flags" value="--add-exports=jdk.compiler/com.sun.tools.javac.util=ALL-UNNAMED"/>
     </test>
 </configuration>
diff --git a/core/main.mk b/core/main.mk
index 2e39601..e84dfaa 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -72,8 +72,6 @@
 
 # CTS-specific config.
 -include cts/build/config.mk
-# VTS-specific config.
--include test/vts/tools/vts-tradefed/build/config.mk
 # device-tests-specific-config.
 -include tools/tradefederation/build/suites/device-tests/config.mk
 # general-tests-specific-config.
@@ -92,6 +90,9 @@
 -include test/catbox/tools/build/config.mk
 # CTS-Root-specific config.
 -include test/cts-root/tools/build/config.mk
+# WVTS-specific config.
+-include test/wvts/tools/build/config.mk
+
 
 # Clean rules
 .PHONY: clean-dex-files
@@ -493,6 +494,8 @@
 # Typical build; include any Android.mk files we can find.
 #
 
+include $(BUILD_SYSTEM)/art_config.mk
+
 # Bring in dex_preopt.mk
 # This creates some modules so it needs to be included after
 # should-install-to-system is defined (in order for base_rules.mk to function
@@ -759,6 +762,9 @@
     $(info $(word 1,$(r)) module $(word 2,$(r)) requires non-existent $(word 3,$(r)) module: $(word 4,$(r))) \
   )
   $(warning Set BUILD_BROKEN_MISSING_REQUIRED_MODULES := true to bypass this check if this is intentional)
+  ifneq (,$(PRODUCT_SOURCE_ROOT_DIRS))
+    $(warning PRODUCT_SOURCE_ROOT_DIRS is non-empty. Some necessary modules may have been skipped by Soong)
+  endif
   $(error Build failed)
 endif # _nonexistent_required != empty
 endif # check_missing_required_modules == true
@@ -1246,6 +1252,7 @@
     $(if $(filter tests,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_TESTS)) \
     $(if $(filter asan,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_ASAN)) \
     $(if $(filter java_coverage,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE)) \
+    $(if $(filter arm64,$(TARGET_ARCH) $(TARGET_2ND_ARCH)),$(call get-product-var,$(1),PRODUCT_PACKAGES_ARM64)) \
     $(call auto-included-modules) \
   ) \
   $(eval ### Filter out the overridden packages and executables before doing expansion) \
@@ -1340,6 +1347,13 @@
                   $(if $(ALL_MODULES.$(m).INSTALLED),\
                     $(if $(filter-out $(HOST_OUT_ROOT)/%,$(ALL_MODULES.$(m).INSTALLED)),,\
                       $(m))))
+    ifeq ($(TARGET_ARCH),riscv64)
+      # HACK: riscv64 can't build the device version of bcc and ld.mc due to a
+      # dependency on an old version of LLVM, but they are listed in
+      # base_system.mk which can't add them conditionally based on the target
+      # architecture.
+      _host_modules := $(filter-out bcc ld.mc,$(_host_modules))
+    endif
     $(call maybe-print-list-and-error,$(sort $(_host_modules)),\
       Host modules should be in PRODUCT_HOST_PACKAGES$(comma) not PRODUCT_PACKAGES)
   endif
@@ -1600,6 +1614,9 @@
 .PHONY: vbmetavendorimage
 vbmetavendorimage: $(INSTALLED_VBMETA_VENDORIMAGE_TARGET)
 
+.PHONY: vbmetacustomimages
+vbmetacustomimages: $(foreach partition,$(call to-upper,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS)),$(INSTALLED_VBMETA_$(partition)IMAGE_TARGET))
+
 # The droidcore-unbundled target depends on the subset of targets necessary to
 # perform a full system build (either unbundled or not).
 .PHONY: droidcore-unbundled
@@ -1884,11 +1901,11 @@
   endif
 
   # Put XML formatted API files in the dist dir.
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/api.xml: $(call java-lib-files,android_stubs_current) $(APICHECK)
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/system-api.xml: $(call java-lib-files,android_system_stubs_current) $(APICHECK)
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/module-lib-api.xml: $(call java-lib-files,android_module_lib_stubs_current) $(APICHECK)
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/system-server-api.xml: $(call java-lib-files,android_system_server_stubs_current) $(APICHECK)
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/test-api.xml: $(call java-lib-files,android_test_stubs_current) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/api.xml: $(call java-lib-files,$(ANDROID_PUBLIC_STUBS)) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/system-api.xml: $(call java-lib-files,$(ANDROID_SYSTEM_STUBS)) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/module-lib-api.xml: $(call java-lib-files,$(ANDROID_MODULE_LIB_STUBS)) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/system-server-api.xml: $(call java-lib-files,$(ANDROID_SYSTEM_SERVER_STUBS)) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/test-api.xml: $(call java-lib-files,$(ANDROID_TEST_STUBS)) $(APICHECK)
 
   api_xmls := $(addprefix $(TARGET_OUT_COMMON_INTERMEDIATES)/,api.xml system-api.xml module-lib-api.xml system-server-api.xml test-api.xml)
   $(api_xmls):
@@ -2011,6 +2028,183 @@
 # missing dependency errors.
 $(call build-license-metadata)
 
+# Generate SBOM in SPDX format
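+# PRODUCT_COPY_FILES entries are <src>:<dest>[:<owner>]; keep only the <src>:<dest> pair here.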
+product_copy_files_without_owner := $(foreach pcf,$(PRODUCT_COPY_FILES),$(call word-colon,1,$(pcf)):$(call word-colon,2,$(pcf)))
+ifeq ($(TARGET_BUILD_APPS),)
+dest_files_without_source := $(sort $(foreach pcf,$(product_copy_files_without_owner),$(if $(wildcard $(call word-colon,1,$(pcf))),,$(call word-colon,2,$(pcf)))))
+dest_files_without_source := $(addprefix $(PRODUCT_OUT)/,$(dest_files_without_source))
+filter_out_files := \
+  $(PRODUCT_OUT)/apex/% \
+  $(PRODUCT_OUT)/fake_packages/% \
+  $(PRODUCT_OUT)/testcases/% \
+  $(dest_files_without_source)
+# Check if each partition image is built, if not filter out all its installed files
+# Also check if a partition uses a prebuilt image file, and record that info if so.
+PREBUILT_PARTITION_COPY_FILES :=
+# product.img
+ifndef BUILDING_PRODUCT_IMAGE
+filter_out_files += $(PRODUCT_OUT)/product/%
+ifdef BOARD_PREBUILT_PRODUCTIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_PRODUCTIMAGE):$(INSTALLED_PRODUCTIMAGE_TARGET)
+endif
+endif
+
+# system.img
+ifndef BUILDING_SYSTEM_IMAGE
+filter_out_files += $(PRODUCT_OUT)/system/%
+endif
+# system_dlkm.img
+ifndef BUILDING_SYSTEM_DLKM_IMAGE
+filter_out_files += $(PRODUCT_OUT)/system_dlkm/%
+ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_SYSTEM_DLKMIMAGE):$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
+endif
+endif
+# system_ext.img
+ifndef BUILDING_SYSTEM_EXT_IMAGE
+filter_out_files += $(PRODUCT_OUT)/system_ext/%
+ifdef BOARD_PREBUILT_SYSTEM_EXTIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_SYSTEM_EXTIMAGE):$(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
+endif
+endif
+# system_other.img
+ifndef BUILDING_SYSTEM_OTHER_IMAGE
+filter_out_files += $(PRODUCT_OUT)/system_other/%
+endif
+
+# odm.img
+ifndef BUILDING_ODM_IMAGE
+filter_out_files += $(PRODUCT_OUT)/odm/%
+ifdef BOARD_PREBUILT_ODMIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_ODMIMAGE):$(INSTALLED_ODMIMAGE_TARGET)
+endif
+endif
+# odm_dlkm.img
+ifndef BUILDING_ODM_DLKM_IMAGE
+filter_out_files += $(PRODUCT_OUT)/odm_dlkm/%
+ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_ODM_DLKMIMAGE):$(INSTALLED_ODM_DLKMIMAGE_TARGET)
+endif
+endif
+
+# vendor.img
+ifndef BUILDING_VENDOR_IMAGE
+filter_out_files += $(PRODUCT_OUT)/vendor/%
+ifdef BOARD_PREBUILT_VENDORIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_VENDORIMAGE):$(INSTALLED_VENDORIMAGE_TARGET)
+endif
+endif
+# vendor_dlkm.img
+ifndef BUILDING_VENDOR_DLKM_IMAGE
+filter_out_files += $(PRODUCT_OUT)/vendor_dlkm/%
+ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_VENDOR_DLKMIMAGE):$(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
+endif
+endif
+
+# cache.img
+ifndef BUILDING_CACHE_IMAGE
+filter_out_files += $(PRODUCT_OUT)/cache/%
+endif
+
+# boot.img
+ifndef BUILDING_BOOT_IMAGE
+ifdef BOARD_PREBUILT_BOOTIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_BOOTIMAGE):$(INSTALLED_BOOTIMAGE_TARGET)
+endif
+endif
+# init_boot.img
+ifndef BUILDING_INIT_BOOT_IMAGE
+ifdef BOARD_PREBUILT_INIT_BOOT_IMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_INIT_BOOT_IMAGE):$(INSTALLED_INIT_BOOT_IMAGE_TARGET)
+endif
+endif
+
+# ramdisk.img
+ifndef BUILDING_RAMDISK_IMAGE
+filter_out_files += $(PRODUCT_OUT)/ramdisk/%
+endif
+
+# recovery.img
+ifndef INSTALLED_RECOVERYIMAGE_TARGET
+filter_out_files += $(PRODUCT_OUT)/recovery/%
+endif
+
+installed_files := $(sort $(filter-out $(filter_out_files),$(filter $(PRODUCT_OUT)/%,$(modules_to_install))))
+else
+installed_files := $(apps_only_installed_files)
+endif  # TARGET_BUILD_APPS
+
+# sbom-metadata.csv contains all the raw data collected in Make for generating the SBOM with generate-sbom.py.
+# There are multiple columns, and each identifies the source of an installed file for a specific case.
+# The columns and their uses are described as below:
+#   installed_file: the file path on device, e.g. /product/app/Browser2/Browser2.apk
+#   module_path: the path of the module that generates the installed file, e.g. packages/apps/Browser2
+#   soong_module_type: Soong module type, e.g. android_app, cc_binary
+#   is_prebuilt_make_module: Y, if the installed file is from a prebuilt Make module, see prebuilt_internal.mk
+#   product_copy_files: the installed file is from variable PRODUCT_COPY_FILES, e.g. device/google/cuttlefish/shared/config/init.product.rc:product/etc/init/init.rc
+#   kernel_module_copy_files: the installed file is from variable KERNEL_MODULE_COPY_FILES, similar to product_copy_files
+#   is_platform_generated: an aggregated value covering several small cases instead of adding more columns; it is set to Y if any of the following is Y
+#       is_build_prop: build.prop in each partition, see sysprop.mk.
+#       is_notice_file: NOTICE.xml.gz in each partition, see Makefile.
+#       is_dexpreopt_image_profile: see the usage of DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED in Soong and Make
+#       is_product_system_other_avbkey: see INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET
+#       is_system_other_odex_marker: see INSTALLED_SYSTEM_OTHER_ODEX_MARKER
+#       is_event_log_tags_file: see variable event_log_tags_file in Makefile
+#       is_kernel_modules_blocklist: modules.blocklist created for _dlkm partitions, see macro build-image-kernel-modules-dir in Makefile.
+#       is_fsverity_build_manifest_apk: BuildManifest<part>.apk files for system and system_ext partition, see ALL_FSVERITY_BUILD_MANIFEST_APK in Makefile.
+#       is_linker_config: see SYSTEM_LINKER_CONFIG and vendor_linker_config_file in Makefile.
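+#   An illustrative example row for an android_app module:
+#   /product/app/Browser2/Browser2.apk,packages/apps/Browser2,android_app,,,,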
+
+# (TODO: b/272358583 find another way of always rebuilding this target)
+# Remove sbom-metadata.csv whenever the makefile is evaluated.
+$(shell rm $(PRODUCT_OUT)/sbom-metadata.csv >/dev/null 2>&1)
+$(PRODUCT_OUT)/sbom-metadata.csv: $(installed_files)
+	rm -f $@
+	@echo installed_file$(comma)module_path$(comma)soong_module_type$(comma)is_prebuilt_make_module$(comma)product_copy_files$(comma)kernel_module_copy_files$(comma)is_platform_generated >> $@
+	$(foreach f,$(installed_files),\
+	  $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \
+	  $(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$f)) \
+	  $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
+	  $(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \
+	  $(eval _is_prebuilt_make_module := $(ALL_MODULES.$(_module_name).IS_PREBUILT_MAKE_MODULE)) \
+	  $(eval _post_installed_dexpreopt_zip := $(DEXPREOPT.$(_module_name).POST_INSTALLED_DEXPREOPT_ZIP)) \
+	  $(eval _product_copy_files := $(sort $(filter %:$(_path_on_device),$(product_copy_files_without_owner)))) \
+	  $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \
+	  $(eval _is_build_prop := $(call is-build-prop,$f)) \
+	  $(eval _is_notice_file := $(call is-notice-file,$f)) \
+	  $(eval _is_dexpreopt_image_profile := $(if $(filter %:/$(_path_on_device),$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED)),Y)) \
+	  $(eval _is_product_system_other_avbkey := $(if $(findstring $f,$(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET)),Y)) \
+	  $(eval _is_event_log_tags_file := $(if $(findstring $f,$(event_log_tags_file)),Y)) \
+	  $(eval _is_system_other_odex_marker := $(if $(findstring $f,$(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)),Y)) \
+	  $(eval _is_kernel_modules_blocklist := $(if $(findstring $f,$(ALL_KERNEL_MODULES_BLOCKLIST)),Y)) \
+	  $(eval _is_fsverity_build_manifest_apk := $(if $(findstring $f,$(ALL_FSVERITY_BUILD_MANIFEST_APK)),Y)) \
+	  $(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \
+	  $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \
+	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)) \
+	  @echo /$(_path_on_device)$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated) >> $@ $(newline) \
+	  $(if $(_post_installed_dexpreopt_zip), \
+	  for i in $$(zipinfo -1 $(_post_installed_dexpreopt_zip)); do echo /$$i$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated) >> $@ ; done $(newline) \
+	  ) \
+	)
+
+.PHONY: sbom
+ifeq ($(TARGET_BUILD_APPS),)
+sbom: $(PRODUCT_OUT)/sbom.spdx.json
+$(PRODUCT_OUT)/sbom.spdx.json: $(PRODUCT_OUT)/sbom.spdx
+$(PRODUCT_OUT)/sbom.spdx: $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
+	rm -rf $@
+	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --product_out_dir=$(PRODUCT_OUT) --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr="$(PRODUCT_MANUFACTURER)" --json
+
+$(call dist-for-goals,droid,$(PRODUCT_OUT)/sbom.spdx.json:sbom/sbom.spdx.json)
+else
+apps_only_sbom_files := $(sort $(patsubst %,%.spdx,$(apps_only_installed_files)))
+$(apps_only_sbom_files): $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
+	rm -rf $@
+	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --product_out_dir=$(PRODUCT_OUT) --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr="$(PRODUCT_MANUFACTURER)" --unbundled
+
+sbom: $(apps_only_sbom_files)
+endif
+
 $(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
 
 $(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] writing build rules ...)
diff --git a/core/os_licensing.mk b/core/os_licensing.mk
index db7c422..1e1b7df 100644
--- a/core/os_licensing.mk
+++ b/core/os_licensing.mk
@@ -21,8 +21,8 @@
 	$(copy-file-to-target)
 endif
 
-$(call declare-0p-target,$(target_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_notice_html_or_xml_gz))
+$(call declare-1p-target,$(target_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_notice_html_or_xml_gz))
 endif
 
 .PHONY: vendorlicense
@@ -43,8 +43,8 @@
 $(installed_vendor_notice_xml_gz): $(target_vendor_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_vendor_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_vendor_notice_xml_gz))
+$(call declare-1p-target,$(target_vendor_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_vendor_notice_xml_gz))
 endif
 
 .PHONY: odmlicense
@@ -62,8 +62,8 @@
 $(installed_odm_notice_xml_gz): $(target_odm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_odm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_odm_notice_xml_gz))
+$(call declare-1p-target,$(target_odm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_odm_notice_xml_gz))
 endif
 
 .PHONY: oemlicense
@@ -84,8 +84,8 @@
 $(installed_product_notice_xml_gz): $(target_product_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_product_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_product_notice_xml_gz))
+$(call declare-1p-target,$(target_product_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_product_notice_xml_gz))
 endif
 
 .PHONY: systemextlicense
@@ -103,8 +103,8 @@
 $(installed_system_ext_notice_xml_gz): $(target_system_ext_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_system_ext_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_system_ext_notice_xml_gz))
+$(call declare-1p-target,$(target_system_ext_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_system_ext_notice_xml_gz))
 endif
 
 .PHONY: vendor_dlkmlicense
@@ -122,8 +122,8 @@
 $(installed_vendor_dlkm_notice_xml_gz): $(target_vendor_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_vendor_dlkm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_vendor_dlkm_notice_xml_gz))
+$(call declare-1p-target,$(target_vendor_dlkm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_vendor_dlkm_notice_xml_gz))
 endif
 
 .PHONY: odm_dlkmlicense
@@ -141,8 +141,8 @@
 $(installed_odm_dlkm_notice_xml_gz): $(target_odm_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_odm_dlkm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_odm_dlkm_notice_xml_gz))
+$(call declare-1p-target,$(target_odm_dlkm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_odm_dlkm_notice_xml_gz))
 endif
 
 .PHONY: system_dlkmlicense
@@ -160,8 +160,8 @@
 $(installed_system_dlkm_notice_xml_gz): $(target_system_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_system_dlkm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_sysetm_dlkm_notice_xml_gz))
+$(call declare-1p-target,$(target_system_dlkm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_sysetm_dlkm_notice_xml_gz))
 endif
 
 endif # not TARGET_BUILD_APPS
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index ef1471d..5bea9b6 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -57,6 +57,9 @@
   $(error $(LOCAL_MODULE) : unexpected LOCAL_MODULE_CLASS for prebuilts: $(LOCAL_MODULE_CLASS))
 endif
 
+$(if $(filter-out $(SOONG_ANDROID_MK),$(LOCAL_MODULE_MAKEFILE)), \
+  $(eval ALL_MODULES.$(my_register_name).IS_PREBUILT_MAKE_MODULE := Y))
+
 $(built_module) : $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
 my_prebuilt_src_file :=
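For illustration only, a minimal sketch of how the IS_PREBUILT_MAKE_MODULE marker set above could be queried later in the make system (the `make_prebuilt_modules` variable below is hypothetical):
```
# Hypothetical: collect all modules that were registered as make-defined prebuilts.
make_prebuilt_modules := $(strip $(foreach m,$(ALL_MODULES),\
  $(if $(ALL_MODULES.$(m).IS_PREBUILT_MAKE_MODULE),$(m))))
$(info make-defined prebuilt modules: $(make_prebuilt_modules))
```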
diff --git a/core/product.mk b/core/product.mk
index 277fa74..cdc3d09 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -24,6 +24,8 @@
 
 _product_single_value_vars += PRODUCT_NAME
 _product_single_value_vars += PRODUCT_MODEL
+_product_single_value_vars += PRODUCT_NAME_FOR_ATTESTATION
+_product_single_value_vars += PRODUCT_MODEL_FOR_ATTESTATION
 
 # The resource configuration options to use for this product.
 _product_list_vars += PRODUCT_LOCALES
@@ -34,15 +36,18 @@
 _product_list_vars += PRODUCT_PACKAGES
 _product_list_vars += PRODUCT_PACKAGES_DEBUG
 _product_list_vars += PRODUCT_PACKAGES_DEBUG_ASAN
+_product_list_vars += PRODUCT_PACKAGES_ARM64
 # Packages included only for eng/userdebug builds, when building with EMMA_INSTRUMENT=true
 _product_list_vars += PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE
 _product_list_vars += PRODUCT_PACKAGES_ENG
 _product_list_vars += PRODUCT_PACKAGES_TESTS
+_product_list_vars += PRODUCT_AFDO_PROFILES
 
 # The device that this product maps to.
 _product_single_value_vars += PRODUCT_DEVICE
 _product_single_value_vars += PRODUCT_MANUFACTURER
 _product_single_value_vars += PRODUCT_BRAND
+_product_single_value_vars += PRODUCT_BRAND_FOR_ATTESTATION
 
 # These PRODUCT_SYSTEM_* flags, if defined, are used in place of the
 # corresponding PRODUCT_* flags for the sysprops on /system.
@@ -263,6 +268,12 @@
 # This flag implies PRODUCT_USE_DYNAMIC_PARTITIONS.
 _product_single_value_vars += PRODUCT_RETROFIT_DYNAMIC_PARTITIONS
 
+# List of tags that will be used to gate blueprint modules from the build graph
+_product_list_vars += PRODUCT_INCLUDE_TAGS
+
+# List of directories that will be used to gate blueprint modules from the build graph
+_product_list_vars += PRODUCT_SOURCE_ROOT_DIRS
+
 # When this is true, various build time as well as runtime debugfs restrictions are enabled.
 _product_single_value_vars += PRODUCT_SET_DEBUGFS_RESTRICTIONS
 
@@ -367,6 +378,24 @@
 # BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE but not an explicitly set value.
 _product_single_value_vars += PRODUCT_MODULE_BUILD_FROM_SOURCE
 
+# If true, installs a full version of com.android.virt APEX.
+_product_single_value_vars += PRODUCT_AVF_ENABLED
+
+# List of .json files to be merged/compiled into vendor/etc/linker.config.pb
+_product_list_vars += PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS
+
+# Whether to use userfaultfd GC.
+# Possible values are:
+# - "default" or empty: both the build system and the runtime determine whether to use userfaultfd
+#   GC based on the vendor API level
+# - "true": forces the build system to use userfaultfd GC regardless of the vendor API level; the
+#   runtime determines whether to use userfaultfd GC based on the kernel support. Note that the
+#   device may have to re-compile everything on the first boot if the kernel doesn't support
+#   userfaultfd
+# - "false": prevents the build system and the runtime from using userfaultfd GC even if the device
+#   supports it
+_product_single_value_vars += PRODUCT_ENABLE_UFFD_GC
+
 .KATI_READONLY := _product_single_value_vars _product_list_vars
 _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
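For illustration, a minimal sketch of how a product makefile might set some of the variables added above; the paths and values are hypothetical, not defaults:
```
# Hypothetical device.mk fragment.
PRODUCT_AVF_ENABLED := true
PRODUCT_ENABLE_UFFD_GC := default
PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS += device/acme/example/linker.config.json
PRODUCT_MODEL_FOR_ATTESTATION := ExampleModel
```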
 
diff --git a/core/product_config.mk b/core/product_config.mk
index e03ae2b..1ef8890 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -74,7 +74,7 @@
 ###########################################################
 
 define find-copy-subdir-files
-$(sort $(shell find $(2) -name "$(1)" -type f | $(SED_EXTENDED) "s:($(2)/?(.*)):\\1\\:$(3)/\\2:" | sed "s://:/:g"))
+$(shell find $(2) -name "$(1)" -type f | $(SED_EXTENDED) "s:($(2)/?(.*)):\\1\\:$(3)/\\2:" | sed "s://:/:g" | sort)
 endef
 
 #
@@ -144,7 +144,6 @@
 #
 include $(BUILD_SYSTEM)/node_fns.mk
 include $(BUILD_SYSTEM)/product.mk
-include $(BUILD_SYSTEM)/device.mk
 
 # Read all product definitions.
 #
@@ -274,6 +273,14 @@
 current_product_makefile :=
 
 #############################################################################
+# Check product include tag allowlist
+BLUEPRINT_INCLUDE_TAGS_ALLOWLIST := com.android.mainline_go com.android.mainline
+.KATI_READONLY := BLUEPRINT_INCLUDE_TAGS_ALLOWLIST
+$(foreach include_tag,$(PRODUCT_INCLUDE_TAGS), \
+	$(if $(filter $(include_tag),$(BLUEPRINT_INCLUDE_TAGS_ALLOWLIST)),,\
+	$(call pretty-error, $(include_tag) is not in BLUEPRINT_INCLUDE_TAGS_ALLOWLIST: $(BLUEPRINT_INCLUDE_TAGS_ALLOWLIST))))
+#############################################################################
+
 # Quick check and assign default values
 
 TARGET_DEVICE := $(PRODUCT_DEVICE)
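For illustration, a hypothetical product config exercising the allowlist check added above:
```
# Accepted: the tag is listed in BLUEPRINT_INCLUDE_TAGS_ALLOWLIST.
PRODUCT_INCLUDE_TAGS += com.android.mainline

# Rejected: product config would stop with a pretty-error saying the tag is
# not in BLUEPRINT_INCLUDE_TAGS_ALLOWLIST.
#PRODUCT_INCLUDE_TAGS += com.example.custom
```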
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 7a5e501..97c1d00 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -59,6 +59,12 @@
         if _options.format == "pretty":
             print(attr, "=", repr(value))
         elif _options.format == "make":
+            value = list(value)
+            for i, x in enumerate(value):
+                if type(x) == "tuple" and len(x) == 1:
+                    value[i] = "@inherit:" + x[0] + ".mk"
+                elif type(x) != "string":
+                    fail("Wasn't a list of strings:", attr, " value:", value)
             print(attr, ":=", " ".join(value))
     elif _options.format == "pretty":
         print(attr, "=", repr(value))
@@ -456,6 +462,9 @@
 
 def __words(string_or_list):
     if type(string_or_list) == "list":
+        for x in string_or_list:
+            if type(x) != "string":
+                return string_or_list
         string_or_list = " ".join(string_or_list)
     return _mkstrip(string_or_list).split()
 
@@ -830,6 +839,41 @@
     return [ __mkpatsubst_word(parsed_percent, parsed_src, x) + ":" + __mkpatsubst_word(parsed_percent, parsed_dest, x) for x in words]
 
 
+__zero_values = {
+    "string": "",
+    "list": [],
+    "int": 0,
+    "float": 0,
+    "bool": False,
+    "dict": {},
+    "NoneType": None,
+    "tuple": (),
+}
+def __zero_value(x):
+    t = type(x)
+    if t in __zero_values:
+        return __zero_values[t]
+    else:
+        fail("Unknown type: "+t)
+
+
+def _clear_var_list(g, h, var_list):
+    cfg = __h_cfg(h)
+    for v in __words(var_list):
+        # Set these variables to their zero values rather than None
+        # or removing them from the dictionary, because if they were
+        # removed entirely, ?= would set their value, which it would not
+        # do after a make-based clear_var_list call.
+        if v in g:
+            g[v] = __zero_value(g[v])
+        if v in cfg:
+            cfg[v] = __zero_value(cfg[v])
+
+        if v not in cfg and v not in g:
+            # Cause the variable to appear set like the make version does
+            g[v] = ""
+
+
 def __get_options():
     """Returns struct containing runtime global settings."""
     settings = dict(
@@ -873,6 +917,7 @@
     addsuffix = _addsuffix,
     board_platform_in = _board_platform_in,
     board_platform_is = _board_platform_is,
+    clear_var_list = _clear_var_list,
     copy_files = _copy_files,
     copy_if_exists = _copy_if_exists,
     cfg = __h_cfg,
diff --git a/core/proguard.flags b/core/proguard.flags
index 53f63d8..d790061 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -9,14 +9,19 @@
 # Add this flag in your package's own configuration if it's needed.
 #-flattenpackagehierarchy
 
-# Keep classes and methods that have @VisibleForTesting annotations, except in
-# intermediate libraries that export those annotations (e.g., androidx, guava).
-# This avoids keeping library-specific test code that isn't actually needed
-# for platform testing.
+# Keep classes and members with the platform-defined @VisibleForTesting annotation.
+-keep @com.android.internal.annotations.VisibleForTesting class *
+-keepclassmembers class * {
+    @com.android.internal.annotations.VisibleForTesting *;
+}
+
+# Keep classes and members with non-platform @VisibleForTesting annotations, but
+# only within platform-defined packages. This avoids keeping external, library-specific
+# test code that isn't actually needed for platform testing.
 # TODO(b/239961360): Migrate away from androidx.annotation.VisibleForTesting
 # and com.google.common.annotations.VisibleForTesting use in platform code.
--keep @**.VisibleForTesting class !androidx.**,!com.google.common.**,*
--keepclassmembers class !androidx.**,!com.google.common.**,* {
+-keep @**.VisibleForTesting class android.**,com.android.**,com.google.android.**
+-keepclassmembers class android.**,com.android.**,com.google.android.** {
     @**.VisibleForTesting *;
 }
 
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index 7e7b270..b59527a 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -41,6 +41,11 @@
     java.lang.Object readResolve();
 }
 
+# Keep all Javascript API methods
+-keepclassmembers class * {
+    @android.webkit.JavascriptInterface <methods>;
+}
+
 # Keep Throwable's constructor that takes a String argument.
 -keepclassmembers class * extends java.lang.Throwable {
   <init>(java.lang.String);
diff --git a/core/python_binary_host_mobly_test_config_template.xml b/core/python_binary_host_mobly_test_config_template.xml
new file mode 100644
index 0000000..a6576cd
--- /dev/null
+++ b/core/python_binary_host_mobly_test_config_template.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2023 The Android Open Source Project
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+          http://www.apache.org/licenses/LICENSE-2.0
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Config for {MODULE} mobly test">
+    {EXTRA_CONFIGS}
+
+    <device name="device1"></device>
+    <device name="device2"></device>
+
+    <test class="com.android.tradefed.testtype.mobly.MoblyBinaryHostTest">
+      <!-- The mobly-par-file-name should match the module name -->
+      <option name="mobly-par-file-name" value="{MODULE}" />
+      <!-- Timeout limit in milliseconds for all test cases of the python binary -->
+      <option name="mobly-test-timeout" value="300000" />
+    </test>
+</configuration>
diff --git a/core/rbe.mk b/core/rbe.mk
index 65abde5..6754b0a 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -81,11 +81,11 @@
   endif
 
   ifdef RBE_R8
-    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=$(JAVA))
+    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/host/linux-x86/framework/r8.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=$(firstword $(JAVA)))
   endif
 
   ifdef RBE_D8
-    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=$(JAVA))
+    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/host/linux-x86/framework/d8.jar --toolchain_inputs=$(firstword $(JAVA)))
   endif
 
   rbe_dir :=
diff --git a/core/sbom.mk b/core/sbom.mk
new file mode 100644
index 0000000..e23bbc1
--- /dev/null
+++ b/core/sbom.mk
@@ -0,0 +1,11 @@
+# For SBOM generation
+# This is included by base_rules.mk and does not need to be included in other .mk files
+# unless a .mk file changes its installed file after including base_rules.mk.
+
+ifdef my_register_name
+  ifneq (, $(strip $(ALL_MODULES.$(my_register_name).INSTALLED)))
+    $(foreach installed_file,$(ALL_MODULES.$(my_register_name).INSTALLED),\
+      $(eval ALL_INSTALLED_FILES.$(installed_file) := $(my_register_name))\
+    )
+  endif
+endif
\ No newline at end of file
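For illustration, a hypothetical lookup against the mapping built in this new file (the installed path is made up):
```
# After base_rules.mk has processed a module, each of its installed files maps
# back to the registering module name for SBOM attribution.
owning_module := $(ALL_INSTALLED_FILES.$(PRODUCT_OUT)/system/bin/example)
$(info $(PRODUCT_OUT)/system/bin/example is installed by: $(owning_module))
```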
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 786a755..dd550b5 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -162,19 +162,21 @@
 # embedded JNI will already have been handled by soong
 my_embed_jni :=
 my_prebuilt_jni_libs :=
-ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
-  my_2nd_arch_prefix :=
-  LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH))
-  partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_ARCH))
-  include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
-endif
-ifdef TARGET_2ND_ARCH
-  ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH)
-    my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
-    LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH))
-    partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_2ND_ARCH))
+ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
+  ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
+    my_2nd_arch_prefix :=
+    LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH))
+    partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_ARCH))
     include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
   endif
+  ifdef TARGET_2ND_ARCH
+    ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH)
+      my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
+      LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH))
+      partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_2ND_ARCH))
+      include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
+    endif
+  endif
 endif
 LOCAL_SHARED_JNI_LIBRARIES :=
 my_embed_jni :=
@@ -267,3 +269,8 @@
 endif
 
 SOONG_ALREADY_CONV += $(LOCAL_MODULE)
+
+###########################################################
+## SBOM generation
+###########################################################
+include $(BUILD_SBOM_GEN)
\ No newline at end of file
diff --git a/core/soong_config.mk b/core/soong_config.mk
index b000df6..6348cf0 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -9,6 +9,7 @@
 endif
 endif
 
+include $(BUILD_SYSTEM)/art_config.mk
 include $(BUILD_SYSTEM)/dex_preopt_config.mk
 
 ifeq ($(WRITE_SOONG_VARIABLES),true)
@@ -30,6 +31,7 @@
 $(call add_json_val,  Platform_sdk_extension_version,    $(PLATFORM_SDK_EXTENSION_VERSION))
 $(call add_json_val,  Platform_base_sdk_extension_version, $(PLATFORM_BASE_SDK_EXTENSION_VERSION))
 $(call add_json_csv,  Platform_version_active_codenames, $(PLATFORM_VERSION_ALL_CODENAMES))
+$(call add_json_csv,  Platform_version_all_preview_codenames, $(PLATFORM_VERSION_ALL_PREVIEW_CODENAMES))
 $(call add_json_str,  Platform_security_patch,           $(PLATFORM_SECURITY_PATCH))
 $(call add_json_str,  Platform_preview_sdk_version,      $(PLATFORM_PREVIEW_SDK_VERSION))
 $(call add_json_str,  Platform_base_os,                  $(PLATFORM_BASE_OS))
@@ -248,9 +250,9 @@
 $(call add_json_list, MissingUsesLibraries,              $(INTERNAL_PLATFORM_MISSING_USES_LIBRARIES))
 
 $(call add_json_map, VendorVars)
-$(foreach namespace,$(SOONG_CONFIG_NAMESPACES),\
+$(foreach namespace,$(sort $(SOONG_CONFIG_NAMESPACES)),\
   $(call add_json_map, $(namespace))\
-  $(foreach key,$(SOONG_CONFIG_$(namespace)),\
+  $(foreach key,$(sort $(SOONG_CONFIG_$(namespace))),\
     $(call add_json_str,$(key),$(subst ",\",$(SOONG_CONFIG_$(namespace)_$(key)))))\
   $(call end_json_map))
 $(call end_json_map)
@@ -265,6 +267,10 @@
 
 $(call add_json_bool, CompressedApex, $(filter true,$(PRODUCT_COMPRESSED_APEX)))
 
+ifndef APEX_BUILD_FOR_PRE_S_DEVICES
+$(call add_json_bool, TrimmedApex, $(filter true,$(PRODUCT_TRIMMED_APEX)))
+endif
+
 $(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
 
 $(call add_json_list, BoardKernelBinaries, $(BOARD_KERNEL_BINARIES))
@@ -281,6 +287,7 @@
 $(call add_json_bool, BuildBrokenDepfile,                 $(filter true,$(BUILD_BROKEN_DEPFILE)))
 $(call add_json_bool, BuildBrokenEnforceSyspropOwner,     $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
 $(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
+$(call add_json_bool, BuildBrokenUsesSoongPython2Modules, $(filter true,$(BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES)))
 $(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
 $(call add_json_list, BuildBrokenInputDirModules, $(BUILD_BROKEN_INPUT_DIR_MODULES))
 
@@ -299,6 +306,11 @@
 
 $(call add_json_bool, IgnorePrefer32OnDevice, $(filter true,$(IGNORE_PREFER32_ON_DEVICE)))
 
+$(call add_json_list, IncludeTags,                $(PRODUCT_INCLUDE_TAGS))
+$(call add_json_list, SourceRootDirs,             $(PRODUCT_SOURCE_ROOT_DIRS))
+
+$(call add_json_list, AfdoProfiles,                $(PRODUCT_AFDO_PROFILES))
+
 $(call json_end)
 
 $(file >$(SOONG_VARIABLES).tmp,$(json_contents))
diff --git a/core/sysprop.mk b/core/sysprop.mk
index b51818a..bd6f3d9 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -46,6 +46,10 @@
         echo "ro.product.$(1).manufacturer=$(PRODUCT_MANUFACTURER)" >> $(2);\
         echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
         echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
+        # Attestation-specific properties for AOSP/GSI builds running on device.
+        echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\
+        echo "ro.product.brand_for_attestation=$(PRODUCT_BRAND_FOR_ATTESTATION)" >> $(2);\
+        echo "ro.product.name_for_attestation=$(PRODUCT_NAME_FOR_ATTESTATION)" >> $(2);\
     )\
     $(if $(filter true,$(ZYGOTE_FORCE_64)),\
         $(if $(filter vendor,$(1)),\
@@ -137,7 +141,7 @@
 	    fi;)
 	$(hide) echo "# end of file" >> $$@
 
-$(call declare-0p-target,$(2))
+$(call declare-1p-target,$(2))
 endef
 
 # -----------------------------------------------------------------
@@ -539,3 +543,19 @@
     $(empty)))
 
 $(eval $(call declare-1p-target,$(INSTALLED_RAMDISK_BUILD_PROP_TARGET)))
+
+ALL_INSTALLED_BUILD_PROP_FILES := \
+  $(INSTALLED_BUILD_PROP_TARGET) \
+  $(INSTALLED_VENDOR_BUILD_PROP_TARGET) \
+  $(INSTALLED_PRODUCT_BUILD_PROP_TARGET) \
+  $(INSTALLED_ODM_BUILD_PROP_TARGET) \
+  $(INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET) \
+  $(INSTALLED_ODM_DLKM_BUILD_PROP_TARGET) \
+  $(INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET) \
+  $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET) \
+  $(INSTALLED_RAMDISK_BUILD_PROP_TARGET)
+
+# $1 installed file path, e.g. out/target/product/vsoc_x86_64/system/build.prop
+define is-build-prop
+$(if $(findstring $1,$(ALL_INSTALLED_BUILD_PROP_FILES)),Y)
+endef
\ No newline at end of file
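For illustration, a hypothetical caller of the new is-build-prop macro:
```
# Expands to "Y" when the given path is one of the installed build.prop
# targets listed in ALL_INSTALLED_BUILD_PROP_FILES above.
ifneq (,$(call is-build-prop,$(PRODUCT_OUT)/system/build.prop))
  $(info $(PRODUCT_OUT)/system/build.prop is a generated build.prop file)
endif
```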
diff --git a/core/tasks/OWNERS b/core/tasks/OWNERS
deleted file mode 100644
index 594930d..0000000
--- a/core/tasks/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-per-file art-host-tests.mk = dshi@google.com,dsrbecky@google.com,jdesprez@google.com,rpl@google.com
diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk
index 2af1ded..ff9eb09 100644
--- a/core/tasks/art-host-tests.mk
+++ b/core/tasks/art-host-tests.mk
@@ -24,25 +24,55 @@
     $(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
     $(_cmf_src)))
 
-$(art_host_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_art_host_tests)
+# Create an artifact to include a list of test config files in art-host-tests.
+art_host_tests_list_zip := $(PRODUCT_OUT)/art-host-tests_list.zip
+# Create an artifact to include all test config files in art-host-tests.
+art_host_tests_configs_zip := $(PRODUCT_OUT)/art-host-tests_configs.zip
+# Create an artifact to include all shared library files in art-host-tests.
+art_host_tests_host_shared_libs_zip := $(PRODUCT_OUT)/art-host-tests_host-shared-libs.zip
 
+$(art_host_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_art_host_tests)
+$(art_host_tests_zip) : PRIVATE_art_host_tests_list_zip := $(art_host_tests_list_zip)
+$(art_host_tests_zip) : PRIVATE_art_host_tests_configs_zip := $(art_host_tests_configs_zip)
+$(art_host_tests_zip) : PRIVATE_art_host_tests_host_shared_libs_zip := $(art_host_tests_host_shared_libs_zip)
+$(art_host_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(art_host_tests_list_zip) $(art_host_tests_configs_zip) $(art_host_tests_host_shared_libs_zip)
+$(art_host_tests_zip) : PRIVATE_INTERMEDIATES_DIR := $(intermediates_dir)
 $(art_host_tests_zip) : $(COMPATIBILITY.art-host-tests.FILES) $(my_host_shared_lib_for_art_host_tests) $(SOONG_ZIP)
-	echo $(sort $(COMPATIBILITY.art-host-tests.FILES)) | tr " " "\n" > $@.list
-	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
-	$(hide) touch $@-host-libs.list
+	rm -rf $(PRIVATE_INTERMEDIATES_DIR)
+	rm -f $@ $(PRIVATE_art_host_tests_list_zip)
+	mkdir -p $(PRIVATE_INTERMEDIATES_DIR)
+	echo $(sort $(COMPATIBILITY.art-host-tests.FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
+	grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true
+	$(hide) touch $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
-	  echo $$shared_lib >> $@-host-libs.list; \
+	  echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list; \
 	done
-	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
-	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list \
-	  -P target -C $(PRODUCT_OUT) -l $@-target.list \
-	  -P host/testcases -C $(HOST_OUT) -l $@-host-libs.list
-	rm -f $@.list $@-host.list $@-target.list $@-host-libs.list
+	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \
+	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \
+	  -P host/testcases -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list > $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list || true
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_configs_zip) \
+	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \
+	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list
+	grep $(HOST_OUT) $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list > $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list || true
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_host_shared_libs_zip) \
+	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_list_zip) -C $(PRIVATE_INTERMEDIATES_DIR) -f $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
 
 art-host-tests: $(art_host_tests_zip)
-$(call dist-for-goals, art-host-tests, $(art_host_tests_zip))
+$(call dist-for-goals, art-host-tests, $(art_host_tests_zip) $(art_host_tests_list_zip) $(art_host_tests_configs_zip) $(art_host_tests_host_shared_libs_zip))
 
 $(call declare-1p-container,$(art_host_tests_zip),)
 $(call declare-container-license-deps,$(art_host_tests_zip),$(COMPATIBILITY.art-host-tests.FILES) $(my_host_shared_lib_for_art_host_tests),$(PRODUCT_OUT)/:/)
 
 tests: art-host-tests
+
+intermediates_dir :=
+art_host_tests_zip :=
+art_host_tests_list_zip :=
+art_host_tests_configs_zip :=
+art_host_tests_host_shared_libs_zip :=
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index 3196f52..4167a7e 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -39,7 +39,7 @@
 	grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
 	grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true
-	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list -sha256
 	$(hide) $(SOONG_ZIP) -d -o $(device-tests-configs-zip) \
 	  -P host -C $(HOST_OUT) -l $@-host-test-configs.list \
 	  -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index 5726ee2..8dbc76f 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -87,7 +87,8 @@
 	$(SOONG_ZIP) -d -o $@ \
 	  -P host -C $(PRIVATE_INTERMEDIATES_DIR) -D $(PRIVATE_INTERMEDIATES_DIR)/tools \
 	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \
-	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list
+	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \
+	  -sha256
 	$(SOONG_ZIP) -d -o $(PRIVATE_general_tests_configs_zip) \
 	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \
 	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list
diff --git a/core/tasks/host-unit-tests.mk b/core/tasks/host-unit-tests.mk
index ed2f2a6..733a2e2 100644
--- a/core/tasks/host-unit-tests.mk
+++ b/core/tasks/host-unit-tests.mk
@@ -41,7 +41,7 @@
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list \
 	  -P target -C $(PRODUCT_OUT) -l $@-target.list \
-	  -P host/testcases -C $(HOST_OUT) -l $@-host-libs.list
+	  -P host/testcases -C $(HOST_OUT) -l $@-host-libs.list -sha256
 	rm -f $@.list $@-host.list $@-target.list $@-host-libs.list
 
 host-unit-tests: $(host_unit_tests_zip)
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index dbd1e84..e83d408 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -41,3 +41,9 @@
 
 $(call dist-for-goals, general-tests, $(MODULE_INFO_JSON))
 $(call dist-for-goals, droidcore-unbundled, $(MODULE_INFO_JSON))
+
+# On every build, generate an all_modules.txt file to be used for autocompleting
+# the m command. After timing this using $(shell date +"%s.%3N"), it only adds
+# 0.01 seconds to the internal master build, and will only rerun on builds that
+# rerun kati.
+$(file >$(PRODUCT_OUT)/all_modules.txt,$(subst $(space),$(newline),$(ALL_MODULES)))
diff --git a/core/tasks/owners.mk b/core/tasks/owners.mk
index 806b8ee..29f3c44 100644
--- a/core/tasks/owners.mk
+++ b/core/tasks/owners.mk
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# Create an artifact to include TEST_MAPPING files in source tree.
+# Create an artifact to include OWNERS files in source tree.
 
 .PHONY: owners
 
diff --git a/core/tasks/sts-lite.mk b/core/tasks/sts-lite.mk
new file mode 100644
index 0000000..dee25d4
--- /dev/null
+++ b/core/tasks/sts-lite.mk
@@ -0,0 +1,40 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+ifneq ($(wildcard test/sts/README-sts-sdk.md),)
+test_suite_name := sts-lite
+test_suite_tradefed := sts-tradefed
+test_suite_readme := test/sts/README-sts-sdk.md
+sts_sdk_zip := $(HOST_OUT)/$(test_suite_name)/sts-sdk.zip
+
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
+
+sts_sdk_samples := $(call intermediates-dir-for,ETC,sts-sdk-samples.zip)/sts-sdk-samples.zip
+
+$(sts_sdk_zip): STS_LITE_ZIP := $(compatibility_zip)
+$(sts_sdk_zip): STS_SDK_SAMPLES := $(sts_sdk_samples)
+$(sts_sdk_zip): $(MERGE_ZIPS) $(ZIP2ZIP) $(compatibility_zip) $(sts_sdk_samples)
+	rm -f $@ $(STS_LITE_ZIP)_filtered
+	$(ZIP2ZIP) -i $(STS_LITE_ZIP) -o $(STS_LITE_ZIP)_filtered \
+		-x android-sts-lite/tools/sts-tradefed-tests.jar \
+		'android-sts-lite/tools/*:sts-test/libs/' \
+		'android-sts-lite/testcases/*:sts-test/utils/'
+	$(MERGE_ZIPS) $@ $(STS_LITE_ZIP)_filtered $(STS_SDK_SAMPLES)
+	rm -f $(STS_LITE_ZIP)_filtered
+
+.PHONY: sts-sdk
+sts-sdk: $(sts_sdk_zip)
+$(call dist-for-goals, sts-sdk, $(sts_sdk_zip))
+
+endif
diff --git a/core/tasks/test_mapping.mk b/core/tasks/test_mapping.mk
index 0b0c93c..eb2a585 100644
--- a/core/tasks/test_mapping.mk
+++ b/core/tasks/test_mapping.mk
@@ -21,17 +21,17 @@
 intermediates := $(call intermediates-dir-for,PACKAGING,test_mapping)
 test_mappings_zip := $(intermediates)/test_mappings.zip
 test_mapping_list := $(OUT_DIR)/.module_paths/TEST_MAPPING.list
-test_mappings := $(file <$(test_mapping_list))
-$(test_mappings_zip) : PRIVATE_test_mappings := $(subst $(newline),\n,$(test_mappings))
 $(test_mappings_zip) : PRIVATE_all_disabled_presubmit_tests := $(ALL_DISABLED_PRESUBMIT_TESTS)
+$(test_mappings_zip) : PRIVATE_test_mapping_list := $(test_mapping_list)
 
-$(test_mappings_zip) : $(test_mappings) $(SOONG_ZIP)
+$(test_mappings_zip) : .KATI_DEPFILE := $(test_mappings_zip).d
+$(test_mappings_zip) : $(test_mapping_list) $(SOONG_ZIP)
 	@echo "Building artifact to include TEST_MAPPING files and tests to skip in presubmit check."
 	rm -rf $@ $(dir $@)/disabled-presubmit-tests
 	echo $(sort $(PRIVATE_all_disabled_presubmit_tests)) | tr " " "\n" > $(dir $@)/disabled-presubmit-tests
-	echo -e "$(PRIVATE_test_mappings)" > $@.list
-	$(SOONG_ZIP) -o $@ -C . -l $@.list -C $(dir $@) -f $(dir $@)/disabled-presubmit-tests
-	rm -f $@.list $(dir $@)/disabled-presubmit-tests
+	$(SOONG_ZIP) -o $@ -C . -l $(PRIVATE_test_mapping_list) -C $(dir $@) -f $(dir $@)/disabled-presubmit-tests
+	echo "$@ : " $$(cat $(PRIVATE_test_mapping_list)) > $@.d
+	rm -f $(dir $@)/disabled-presubmit-tests
 
 test_mapping : $(test_mappings_zip)
 
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index a5f162a..9400890 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -51,7 +51,7 @@
 $(test_suite_jdk): PRIVATE_SUBDIR := $(test_suite_subdir)
 $(test_suite_jdk): $(shell find $(test_suite_jdk_dir) -type f | sort)
 $(test_suite_jdk): $(SOONG_ZIP)
-	$(SOONG_ZIP) -o $@ -P $(PRIVATE_SUBDIR)/jdk -C $(PRIVATE_JDK_DIR) -D $(PRIVATE_JDK_DIR)
+	$(SOONG_ZIP) -o $@ -P $(PRIVATE_SUBDIR)/jdk -C $(PRIVATE_JDK_DIR) -D $(PRIVATE_JDK_DIR) -sha256
 
 $(call declare-license-metadata,$(test_suite_jdk),SPDX-license-identifier-GPL-2.0-with-classpath-exception,permissive,\
   $(test_suite_jdk_dir)/legal/java.base/LICENSE,JDK,prebuilts/jdk/$(notdir $(patsubst %/,%,$(dir $(test_suite_jdk_dir)))))
@@ -123,7 +123,7 @@
 	cp $(PRIVATE_TOOLS) $(PRIVATE_OUT_DIR)/tools
 	$(if $(PRIVATE_DYNAMIC_CONFIG),$(hide) cp $(PRIVATE_DYNAMIC_CONFIG) $(PRIVATE_OUT_DIR)/testcases/$(PRIVATE_SUITE_NAME).dynamic)
 	find $(PRIVATE_RESOURCES) | sort >$@.list
-	$(SOONG_ZIP) -d -o $@.tmp -C $(dir $@) -l $@.list
+	$(SOONG_ZIP) -d -o $@.tmp -C $(dir $@) -l $@.list -sha256
 	$(MERGE_ZIPS) $@ $@.tmp $(PRIVATE_JDK)
 	rm -f $@.tmp
 # Build a list of tests
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index c41aec5..b15df28 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -50,12 +50,12 @@
   $(error done)
 endif
 
-my_missing_files = $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))
+my_missing_files = $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))$(shell $(call echo-warning,$(my_makefile),$(my_package_name): Some necessary modules may have been skipped by Soong. Check if PRODUCT_SOURCE_ROOT_DIRS is pruning necessary Android.bp files.))
 ifeq ($(ALLOW_MISSING_DEPENDENCIES),true)
   # Ignore unknown installed files on partial builds
   my_missing_files =
 else ifneq ($(my_modules_strict),false)
-  my_missing_files = $(shell $(call echo-error,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))$(eval my_missing_error := true)
+  my_missing_files = $(shell $(call echo-error,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))$(shell $(call echo-warning,$(my_makefile),$(my_package_name): Some necessary modules may have been skipped by Soong. Check if PRODUCT_SOURCE_ROOT_DIRS is pruning necessary Android.bp files.))$(eval my_missing_error := true)
 endif
 
 # Iterate over modules' built files and installed files;
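For illustration, a hypothetical product config snippet relevant to the new warning above: restricting PRODUCT_SOURCE_ROOT_DIRS prunes Android.bp files outside the listed roots, which can make Soong skip modules this packaging step expects. The directory list below is made up:
```
# Hypothetical: only Android.bp files under these source roots are loaded by Soong.
PRODUCT_SOURCE_ROOT_DIRS := build external frameworks packages/modules/ExampleModule
```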
diff --git a/core/tasks/wvts.mk b/core/tasks/wvts.mk
new file mode 100644
index 0000000..a79f613
--- /dev/null
+++ b/core/tasks/wvts.mk
@@ -0,0 +1,30 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Widevine test suite for non-GMS partners: go/android-wvts
+ifneq ($(wildcard test/wvts/tools/wvts-tradefed/README),)
+test_suite_name := wvts
+test_suite_tradefed := wvts-tradefed
+test_suite_dynamic_config := test/wvts/tools/wvts-tradefed/DynamicConfig.xml
+test_suite_readme := test/wvts/tools/wvts-tradefed/README
+
+$(call declare-1p-target,$(test_suite_dynamic_config),wvts)
+$(call declare-1p-target,$(test_suite_readme),wvts)
+
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
+
+.PHONY: wvts
+wvts: $(compatibility_zip) $(compatibility_tests_list_zip)
+$(call dist-for-goals, wvts, $(compatibility_zip) $(compatibility_tests_list_zip))
+endif
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index a7d023f..b160648 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -43,7 +43,7 @@
 DEFAULT_PLATFORM_VERSION := UP1A
 .KATI_READONLY := DEFAULT_PLATFORM_VERSION
 MIN_PLATFORM_VERSION := UP1A
-MAX_PLATFORM_VERSION := UP1A
+MAX_PLATFORM_VERSION := VP1A
 
 # The last stable version name of the platform that was released.  During
 # development, this stays at that previous version, while the codename indicates
@@ -54,6 +54,7 @@
 # These are the current development codenames, if the build is not a final
 # release build.  If this is a final release build, it is simply "REL".
 PLATFORM_VERSION_CODENAME.UP1A := UpsideDownCake
+PLATFORM_VERSION_CODENAME.VP1A := VanillaIceCream
 
 # This is the user-visible version.  In a final release build it should
 # be empty to use PLATFORM_VERSION as the user-visible version.  For
@@ -103,7 +104,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-    PLATFORM_SECURITY_PATCH := 2022-10-05
+    PLATFORM_SECURITY_PATCH := 2023-04-05
 endif
 
 include $(BUILD_SYSTEM)/version_util.mk
diff --git a/core/version_util.mk b/core/version_util.mk
index cbfef96..47883d8 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -80,13 +80,28 @@
   $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
     $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
 
+# And this is the list of all the codenames that are actually in preview. The
+# ALL_CODENAMES variable is sort of a lie for historical reasons and only
+# includes codenames up to and including the currently active codename, whereas
+# this variable also includes future codenames. For example, while AOSP is still
+# merging into U but V development has already started, ALL_CODENAMES will only
+# be U, whereas ALL_PREVIEW_CODENAMES will be U and V.
+PLATFORM_VERSION_ALL_PREVIEW_CODENAMES :=
+$(foreach version,$(ALL_VERSIONS),\
+  $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
+  $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_PREVIEW_CODENAMES)),,\
+    $(eval PLATFORM_VERSION_ALL_PREVIEW_CODENAMES += $(_codename))))
+
 # And convert from space separated to comma separated.
 PLATFORM_VERSION_ALL_CODENAMES := \
   $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
+PLATFORM_VERSION_ALL_PREVIEW_CODENAMES := \
+  $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_PREVIEW_CODENAMES)))
 
 .KATI_READONLY := \
   PLATFORM_VERSION_CODENAME \
-  PLATFORM_VERSION_ALL_CODENAMES
+  PLATFORM_VERSION_ALL_CODENAMES \
+  PLATFORM_VERSION_ALL_PREVIEW_CODENAMES \
 
 ifneq (REL,$(PLATFORM_VERSION_CODENAME))
   codenames := \
diff --git a/envsetup.sh b/envsetup.sh
index 3674a4a..905635c 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -1,3 +1,55 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# gettop is duplicated here and in shell_utils.sh, because it's difficult
+# to find shell_utils.sh without it for all the novel ways this file can be
+# sourced.  Other common functions should only be in one place or the other.
+function _gettop_once
+{
+    local TOPFILE=build/make/core/envsetup.mk
+    if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then
+        # The following circumlocution ensures we remove symlinks from TOP.
+        (cd "$TOP"; PWD= /bin/pwd)
+    else
+        if [ -f $TOPFILE ] ; then
+            # The following circumlocution (repeated below as well) ensures
+            # that we record the true directory name and not one that is
+            # faked up with symlink names.
+            PWD= /bin/pwd
+        else
+            local HERE=$PWD
+            local T=
+            while [ \( ! \( -f $TOPFILE \) \) -a \( "$PWD" != "/" \) ]; do
+                \cd ..
+                T=`PWD= /bin/pwd -P`
+            done
+            \cd "$HERE"
+            if [ -f "$T/$TOPFILE" ]; then
+                echo "$T"
+            fi
+        fi
+    fi
+}
+T=$(_gettop_once)
+if [ ! "$T" ]; then
+    echo "Couldn't locate the top of the tree. Always source build/envsetup.sh from the root of the tree." >&2
+    return 1
+fi
+IMPORTING_ENVSETUP=true source $T/build/make/shell_utils.sh
+
+
+# Help
 function hmm() {
 cat <<EOF
 
@@ -27,6 +79,7 @@
 - ggrep:      Greps on all local Gradle files.
 - gogrep:     Greps on all local Go files.
 - jgrep:      Greps on all local Java files.
+- jsongrep:   Greps on all local Json files.
 - ktgrep:     Greps on all local Kotlin files.
 - resgrep:    Greps on all local res/*.xml files.
 - mangrep:    Greps on all local AndroidManifest.xml files.
@@ -35,6 +88,8 @@
 - rsgrep:     Greps on all local Rust files.
 - sepgrep:    Greps on all local sepolicy files.
 - sgrep:      Greps on all local source files.
+- tomlgrep:   Greps on all local Toml files.
+- pygrep:     Greps on all local Python files.
 - godir:      Go to the directory containing a file.
 - allmod:     List all modules.
 - gomod:      Go to the directory containing a module.
@@ -173,7 +228,10 @@
     return 1
 }
 
-function setpaths()
+
+# Add directories to PATH that are dependent on the lunch target.
+# For directories that are not lunch-specific, add them in set_global_paths
+function set_lunch_paths()
 {
     local T=$(gettop)
     if [ ! "$T" ]; then
@@ -185,96 +243,80 @@
     #                                                                #
     #              Read me before you modify this code               #
     #                                                                #
-    #   This function sets ANDROID_BUILD_PATHS to what it is adding  #
-    #   to PATH, and the next time it is run, it removes that from   #
-    #   PATH.  This is required so lunch can be run more than once   #
-    #   and still have working paths.                                #
+    #   This function sets ANDROID_LUNCH_BUILD_PATHS to what it is   #
+    #   adding to PATH, and the next time it is run, it removes that #
+    #   from PATH.  This is required so lunch can be run more than   #
+    #   once and still have working paths.                           #
     #                                                                #
     ##################################################################
 
-    # Note: on windows/cygwin, ANDROID_BUILD_PATHS will contain spaces
+    # Note: on windows/cygwin, ANDROID_LUNCH_BUILD_PATHS will contain spaces
     # due to "C:\Program Files" being in the path.
 
-    # out with the old
-    if [ -n "$ANDROID_BUILD_PATHS" ] ; then
-        export PATH=${PATH/$ANDROID_BUILD_PATHS/}
+    # Handle compat with the old ANDROID_BUILD_PATHS variable. 
+    # TODO: Remove this after we think everyone has lunched again.
+    if [ -z "$ANDROID_LUNCH_BUILD_PATHS" -a -n "$ANDROID_BUILD_PATHS" ] ; then
+      ANDROID_LUNCH_BUILD_PATHS="$ANDROID_BUILD_PATHS"
+      ANDROID_BUILD_PATHS=
     fi
     if [ -n "$ANDROID_PRE_BUILD_PATHS" ] ; then
         export PATH=${PATH/$ANDROID_PRE_BUILD_PATHS/}
         # strip leading ':', if any
         export PATH=${PATH/:%/}
+        ANDROID_PRE_BUILD_PATHS=
     fi
 
-    # and in with the new
+    # Out with the old...
+    if [ -n "$ANDROID_LUNCH_BUILD_PATHS" ] ; then
+        export PATH=${PATH/$ANDROID_LUNCH_BUILD_PATHS/}
+    fi
 
-    export ANDROID_DEV_SCRIPTS=$T/development/scripts:$T/prebuilts/devtools/tools
+    # And in with the new...
+    ANDROID_LUNCH_BUILD_PATHS=$(get_abs_build_var SOONG_HOST_OUT_EXECUTABLES)
+    ANDROID_LUNCH_BUILD_PATHS+=:$(get_abs_build_var HOST_OUT_EXECUTABLES)
 
-    # add kernel specific binaries
-    case $(uname -s) in
-        Linux)
-            export ANDROID_DEV_SCRIPTS=$ANDROID_DEV_SCRIPTS:$T/prebuilts/misc/linux-x86/dtc:$T/prebuilts/misc/linux-x86/libufdt
-            ;;
-        *)
-            ;;
-    esac
-
-    ANDROID_BUILD_PATHS=$(get_build_var ANDROID_BUILD_PATHS)
-    ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$ANDROID_DEV_SCRIPTS
-
-    # Append llvm binutils prebuilts path to ANDROID_BUILD_PATHS.
+    # Append llvm binutils prebuilts path to ANDROID_LUNCH_BUILD_PATHS.
     local ANDROID_LLVM_BINUTILS=$(get_abs_build_var ANDROID_CLANG_PREBUILTS)/llvm-binutils-stable
-    ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$ANDROID_LLVM_BINUTILS
+    ANDROID_LUNCH_BUILD_PATHS+=:$ANDROID_LLVM_BINUTILS
 
     # Set up ASAN_SYMBOLIZER_PATH for SANITIZE_HOST=address builds.
     export ASAN_SYMBOLIZER_PATH=$ANDROID_LLVM_BINUTILS/llvm-symbolizer
 
-    # If prebuilts/android-emulator/<system>/ exists, prepend it to our PATH
-    # to ensure that the corresponding 'emulator' binaries are used.
-    case $(uname -s) in
-        Darwin)
-            ANDROID_EMULATOR_PREBUILTS=$T/prebuilts/android-emulator/darwin-x86_64
-            ;;
-        Linux)
-            ANDROID_EMULATOR_PREBUILTS=$T/prebuilts/android-emulator/linux-x86_64
-            ;;
-        *)
-            ANDROID_EMULATOR_PREBUILTS=
-            ;;
-    esac
-    if [ -n "$ANDROID_EMULATOR_PREBUILTS" -a -d "$ANDROID_EMULATOR_PREBUILTS" ]; then
-        ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$ANDROID_EMULATOR_PREBUILTS
-        export ANDROID_EMULATOR_PREBUILTS
-    fi
-
-    # Append asuite prebuilts path to ANDROID_BUILD_PATHS.
+    # Append asuite prebuilts path to ANDROID_LUNCH_BUILD_PATHS.
     local os_arch=$(get_build_var HOST_PREBUILT_TAG)
-    local ACLOUD_PATH="$T/prebuilts/asuite/acloud/$os_arch"
-    local AIDEGEN_PATH="$T/prebuilts/asuite/aidegen/$os_arch"
-    local ATEST_PATH="$T/prebuilts/asuite/atest/$os_arch"
-    ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$ACLOUD_PATH:$AIDEGEN_PATH:$ATEST_PATH
-
-    # Build system
-    ANDROID_BUILD_PATHS=$ANDROID_BUILD_PATHS:$T/build/bazel/bin
-
-    export ANDROID_BUILD_PATHS=$(tr -s : <<<"${ANDROID_BUILD_PATHS}:")
-    export PATH=$ANDROID_BUILD_PATHS$PATH
-
-    # out with the duplicate old
-    if [ -n $ANDROID_PYTHONPATH ]; then
-        export PYTHONPATH=${PYTHONPATH//$ANDROID_PYTHONPATH/}
-    fi
-    # and in with the new
-    export ANDROID_PYTHONPATH=$T/development/python-packages:
-    if [ -n $VENDOR_PYTHONPATH  ]; then
-        ANDROID_PYTHONPATH=$ANDROID_PYTHONPATH$VENDOR_PYTHONPATH
-    fi
-    export PYTHONPATH=$ANDROID_PYTHONPATH$PYTHONPATH
+    ANDROID_LUNCH_BUILD_PATHS+=:$T/prebuilts/asuite/acloud/$os_arch
+    ANDROID_LUNCH_BUILD_PATHS+=:$T/prebuilts/asuite/aidegen/$os_arch
+    ANDROID_LUNCH_BUILD_PATHS+=:$T/prebuilts/asuite/atest/$os_arch
 
     export ANDROID_JAVA_HOME=$(get_abs_build_var ANDROID_JAVA_HOME)
     export JAVA_HOME=$ANDROID_JAVA_HOME
     export ANDROID_JAVA_TOOLCHAIN=$(get_abs_build_var ANDROID_JAVA_TOOLCHAIN)
-    export ANDROID_PRE_BUILD_PATHS=$ANDROID_JAVA_TOOLCHAIN:
-    export PATH=$ANDROID_PRE_BUILD_PATHS$PATH
+    ANDROID_LUNCH_BUILD_PATHS+=:$ANDROID_JAVA_TOOLCHAIN
+
+    # Fix up PYTHONPATH
+    if [ -n $ANDROID_PYTHONPATH ]; then
+        export PYTHONPATH=${PYTHONPATH//$ANDROID_PYTHONPATH/}
+    fi
+    # //development/python-packages contains both a pseudo-PYTHONPATH layout
+    # (mimicking an already assembled venv) and real Python packages
+    # that are not in that layout until they are installed. We can fake it for
+    # the latter type by adding the package source directories to the PYTHONPATH
+    # directly. For the former group, we only need to add the python-packages
+    # directory itself.
+    #
+    # This could be cleaned up by converting the remaining packages that are in
+    # the first category into a typical python source layout (that is, another
+    # layer of directory nesting) and automatically adding all subdirectories of
+    # python-packages to the PYTHONPATH instead of manually curating this. We
+    # can't convert the packages like adb to the other style because doing so
+    # would prevent exporting type info from those packages.
+    #
+    # http://b/266688086
+    export ANDROID_PYTHONPATH=$T/development/python-packages/adb:$T/development/python-packages:
+    if [ -n $VENDOR_PYTHONPATH ]; then
+        ANDROID_PYTHONPATH=$ANDROID_PYTHONPATH$VENDOR_PYTHONPATH
+    fi
+    export PYTHONPATH=$ANDROID_PYTHONPATH$PYTHONPATH
 
     unset ANDROID_PRODUCT_OUT
     export ANDROID_PRODUCT_OUT=$(get_abs_build_var PRODUCT_OUT)
@@ -292,9 +334,67 @@
     unset ANDROID_TARGET_OUT_TESTCASES
     export ANDROID_TARGET_OUT_TESTCASES=$(get_abs_build_var TARGET_OUT_TESTCASES)
 
-    # needed for building linux on MacOS
-    # TODO: fix the path
-    #export HOST_EXTRACFLAGS="-I "$T/system/kernel_headers/host_include
+    # Finally, set PATH
+    export PATH=$ANDROID_LUNCH_BUILD_PATHS:$PATH
+}
+
+# Add directories to PATH that are NOT dependent on the lunch target.
+# For directories that are lunch-specific, add them in set_lunch_paths
+function set_global_paths()
+{
+    local T=$(gettop)
+    if [ ! "$T" ]; then
+        echo "Couldn't locate the top of the tree.  Try setting TOP."
+        return
+    fi
+
+    ##################################################################
+    #                                                                #
+    #              Read me before you modify this code               #
+    #                                                                #
+    #   This function sets ANDROID_GLOBAL_BUILD_PATHS to what it is  #
+    #   adding to PATH, and the next time it is run, it removes that #
+    #   from PATH.  This is required so envsetup.sh can be sourced   #
+    #   more than once and still have working paths.                 #
+    #                                                                #
+    ##################################################################
+
+    # Out with the old...
+    if [ -n "$ANDROID_GLOBAL_BUILD_PATHS" ] ; then
+        export PATH=${PATH/$ANDROID_GLOBAL_BUILD_PATHS/}
+    fi
+
+    # And in with the new...
+    ANDROID_GLOBAL_BUILD_PATHS=$T/build/bazel/bin
+    ANDROID_GLOBAL_BUILD_PATHS+=:$T/development/scripts
+    ANDROID_GLOBAL_BUILD_PATHS+=:$T/prebuilts/devtools/tools
+
+    # add kernel specific binaries
+    if [ $(uname -s) = Linux ] ; then
+        ANDROID_GLOBAL_BUILD_PATHS+=:$T/prebuilts/misc/linux-x86/dtc
+        ANDROID_GLOBAL_BUILD_PATHS+=:$T/prebuilts/misc/linux-x86/libufdt
+    fi
+
+    # If prebuilts/android-emulator/<system>/ exists, prepend it to our PATH
+    # to ensure that the corresponding 'emulator' binaries are used.
+    case $(uname -s) in
+        Darwin)
+            ANDROID_EMULATOR_PREBUILTS=$T/prebuilts/android-emulator/darwin-x86_64
+            ;;
+        Linux)
+            ANDROID_EMULATOR_PREBUILTS=$T/prebuilts/android-emulator/linux-x86_64
+            ;;
+        *)
+            ANDROID_EMULATOR_PREBUILTS=
+            ;;
+    esac
+    if [ -n "$ANDROID_EMULATOR_PREBUILTS" -a -d "$ANDROID_EMULATOR_PREBUILTS" ]; then
+        ANDROID_GLOBAL_BUILD_PATHS+=:$ANDROID_EMULATOR_PREBUILTS
+        export ANDROID_EMULATOR_PREBUILTS
+    fi
+
+    # Finally, set PATH
+    export PATH=$ANDROID_GLOBAL_BUILD_PATHS:$PATH
 }
 
 function printconfig()
@@ -309,7 +409,7 @@
 
 function set_stuff_for_environment()
 {
-    setpaths
+    set_lunch_paths
     set_sequence_number
 
     export ANDROID_BUILD_TOP=$(gettop)
@@ -413,9 +513,7 @@
     # Lunch must be run in the topdir, but this way we get a clear error
     # message, instead of FileNotFound.
     local T=$(multitree_gettop)
-    if [ -n "$T" ]; then
-      "$T/orchestrator/build/orchestrator/core/lunch.py" "$@"
-    else
+    if [ -z "$T" ]; then
       _multitree_lunch_error
       return 1
     fi
@@ -899,33 +997,6 @@
     destroy_build_var_cache
 }
 
-function gettop
-{
-    local TOPFILE=build/make/core/envsetup.mk
-    if [ -n "$TOP" -a -f "$TOP/$TOPFILE" ] ; then
-        # The following circumlocution ensures we remove symlinks from TOP.
-        (cd "$TOP"; PWD= /bin/pwd)
-    else
-        if [ -f $TOPFILE ] ; then
-            # The following circumlocution (repeated below as well) ensures
-            # that we record the true directory name and not one that is
-            # faked up with symlink names.
-            PWD= /bin/pwd
-        else
-            local HERE=$PWD
-            local T=
-            while [ \( ! \( -f $TOPFILE \) \) -a \( "$PWD" != "/" \) ]; do
-                \cd ..
-                T=`PWD= /bin/pwd -P`
-            done
-            \cd "$HERE"
-            if [ -f "$T/$TOPFILE" ]; then
-                echo "$T"
-            fi
-        fi
-    fi
-}
-
 # TODO: Merge into gettop as part of launching multitree
 function multitree_gettop
 {
@@ -1139,7 +1210,7 @@
     Darwin)
         function sgrep()
         {
-            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|kt|xml|sh|mk|aidl|vts|proto)' \
+            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|kt|xml|sh|mk|aidl|vts|proto|rs|go)' \
                 -exec grep --color -n "$@" {} +
         }
 
@@ -1147,7 +1218,7 @@
     *)
         function sgrep()
         {
-            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|kt\|xml\|sh\|mk\|aidl\|vts\|proto\)' \
+            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|kt\|xml\|sh\|mk\|aidl\|vts\|proto\|rs\|go\)' \
                 -exec grep --color -n "$@" {} +
         }
         ;;
@@ -1182,6 +1253,18 @@
         -exec grep --color -n "$@" {} +
 }
 
+function jsongrep()
+{
+    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.json" \
+        -exec grep --color -n "$@" {} +
+}
+
+function tomlgrep()
+{
+    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.toml" \
+        -exec grep --color -n "$@" {} +
+}
+
 function ktgrep()
 {
     find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.kt" \
@@ -1226,6 +1309,12 @@
         -exec grep --color -n "$@" {} +
 }
 
+function pygrep()
+{
+    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.py" \
+        -exec grep --color -n "$@" {} +
+}
+
 case `uname -s` in
     Darwin)
         function mgrep()
@@ -1551,12 +1640,10 @@
     fi
 }
 
-# List all modules for the current device, as cached in module-info.json. If any build change is
-# made and it should be reflected in the output, you should run 'refreshmod' first.
+# List all modules for the current device, as cached in all_modules.txt. If any build change is
+# made and it should be reflected in the output, you should run `m nothing` first.
 function allmod() {
-    verifymodinfo || return 1
-
-    python3 -c "import json; print('\n'.join(sorted(json.load(open('$ANDROID_PRODUCT_OUT/module-info.json')).keys())))"
+    cat $ANDROID_PRODUCT_OUT/all_modules.txt 2>/dev/null
 }
 
 # Return the Bazel label of a Soong module if it is converted with bp2build.
@@ -1735,7 +1822,7 @@
 
 function _complete_android_module_names() {
     local word=${COMP_WORDS[COMP_CWORD]}
-    COMPREPLY=( $(QUIET_VERIFYMODINFO=true allmod | grep -E "^$word") )
+    COMPREPLY=( $(allmod | grep -E "^$word") )
 }
 
 # Print colored exit condition
@@ -1794,11 +1881,6 @@
         color_reset=""
     fi
 
-    if [[ "x${USE_RBE}" == "x" && $mins -gt 15 && "${ANDROID_BUILD_ENVIRONMENT_CONFIG}" == "googler" ]]; then
-        echo
-        echo "${color_warning}Start using RBE (http://go/build-fast) to get faster builds!${color_reset}"
-    fi
-
     echo
     if [ $ret -eq 0 ] ; then
         echo -n "${color_success}#### build completed successfully "
@@ -1999,5 +2081,7 @@
 }
 
 validate_current_shell
+set_global_paths
 source_vendorsetup
 addcompletions
+
diff --git a/finalize-aidl-vndk-sdk-resources.sh b/finalize-aidl-vndk-sdk-resources.sh
deleted file mode 100755
index 8e12c49..0000000
--- a/finalize-aidl-vndk-sdk-resources.sh
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/bin/bash
-
-set -ex
-
-function finalize_aidl_vndk_sdk_resources() {
-    local top="$(dirname "$0")"/../..
-
-    # default target to modify tree and build SDK
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    # This script is WIP and only finalizes part of the Android branch for release.
-    # The full process can be found at (INTERNAL) go/android-sdk-finalization.
-
-    # VNDK snapshot (TODO)
-    # SDK snapshots (TODO)
-    # Update references in the codebase to new API version (TODO)
-    # ...
-
-    AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api create_reference_dumps
-
-    # Generate ABI dumps
-    ANDROID_BUILD_TOP="$top" \
-        out/host/linux-x86/bin/create_reference_dumps \
-        -p aosp_arm64 --build-variant user
-
-    echo "NOTE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF (until 'DONE')"
-    # Update new versions of files. See update-vndk-list.sh (which requires envsetup.sh)
-    $m check-vndk-list || \
-        { cp $top/out/soong/vndk/vndk.libraries.txt $top/build/make/target/product/gsi/current.txt; }
-    echo "DONE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF"
-
-    # Finalize resources
-    "$top/frameworks/base/tools/aapt2/tools/finalize_res.py" \
-           "$top/frameworks/base/core/res/res/values/public-staging.xml" \
-           "$top/frameworks/base/core/res/res/values/public-final.xml"
-
-    # SDK finalization
-    local sdk_codename='public static final int UPSIDE_DOWN_CAKE = CUR_DEVELOPMENT;'
-    local sdk_version='public static final int UPSIDE_DOWN_CAKE = 34;'
-    local sdk_build="$top/frameworks/base/core/java/android/os/Build.java"
-
-    sed -i "s%$sdk_codename%$sdk_version%g" $sdk_build
-
-    # Force update current.txt
-    $m clobber
-    $m update-api
-}
-
-finalize_aidl_vndk_sdk_resources
-
diff --git a/finalize-cleanup.sh b/finalize-cleanup.sh
deleted file mode 100755
index efa2707..0000000
--- a/finalize-cleanup.sh
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/bin/bash
-# Brings local repository to a remote head state.
-
-# set -ex
-
-function finalize_revert_local_changes_main() {
-    local top="$(dirname "$0")"/../..
-
-    repo selfupdate
-
-    repo forall -c '\
-        git checkout . ; git clean -fdx ;\
-        git checkout @ ; git b fina-step1 -D ; git reset --hard; \
-        repo start fina-step1 ; git checkout @ ; git b fina-step1 -D ;'
-}
-
-finalize_revert_local_changes_main
diff --git a/finalize-step-1.sh b/finalize-step-1.sh
deleted file mode 100755
index 9f87b6c..0000000
--- a/finalize-step-1.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-# Automation for finalize_branch_for_release.sh.
-# Sets up local environment, runs the finalization script and submits the results.
-# WIP:
-# - does not submit, only sends to gerrit.
-
-# set -ex
-
-function revert_local_changes() {
-    repo forall -c '\
-        git checkout . ; git clean -fdx ;\
-        git checkout @ ; git b fina-step1 -D ; git reset --hard; \
-        repo start fina-step1 ; git checkout @ ; git b fina-step1 -D ;\
-        previousHash="$(git log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_1_SCRIPT_COMMIT)" ;\
-        if [[ $previousHash ]]; then git revert --no-commit $previousHash ; fi ;'
-}
-
-function commit_changes() {
-    repo forall -c '\
-        if [[ $(git status --short) ]]; then
-            repo start fina-step1 ;
-            git add -A . ;
-            git commit -m FINALIZATION_STEP_1_SCRIPT_COMMIT -m WILL_BE_AUTOMATICALLY_REVERTED ;
-            repo upload --cbr --no-verify -t -y . ;
-            git clean -fdx ; git reset --hard ;
-        fi'
-}
-
-function finalize_step_1_main() {
-    local top="$(dirname "$0")"/../..
-
-    repo selfupdate
-
-    revert_local_changes
-
-    # vndk etc finalization
-    source $top/build/make/finalize-aidl-vndk-sdk-resources.sh
-
-    # move all changes to fina-step1 branch and commit with a robot message
-    commit_changes
-}
-
-finalize_step_1_main
diff --git a/finalize_branch_for_release.sh b/finalize_branch_for_release.sh
deleted file mode 100755
index b46390d..0000000
--- a/finalize_branch_for_release.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-
-set -ex
-
-function finalize_main() {
-    local top="$(dirname "$0")"/../..
-
-    # default target to modify tree and build SDK
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    # Build finalization artifacts.
-    source $top/build/make/finalize-aidl-vndk-sdk-resources.sh
-
-    # This command tests:
-    #   The release state for AIDL.
-    #   ABI difference between user and userdebug builds.
-    #   Resource/SDK finalization.
-    # In the future, we would want to actually turn the branch into the REL
-    # state and test with that.
-    AIDL_FROZEN_REL=true $m droidcore
-
-    # Build SDK (TODO)
-    # lunch sdk...
-    # m ...
-}
-
-finalize_main
-
diff --git a/shell_utils.sh b/shell_utils.sh
new file mode 100644
index 0000000..9de5a50
--- /dev/null
+++ b/shell_utils.sh
@@ -0,0 +1,74 @@
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+function gettop
+{
+    local TOPFILE=build/make/core/envsetup.mk
+    # The ${TOP-} expansion allows this to work even with set -u
+    if [ -n "${TOP:-}" -a -f "${TOP:-}/$TOPFILE" ] ; then
+        # The following circumlocution ensures we remove symlinks from TOP.
+        (cd "$TOP"; PWD= /bin/pwd)
+    else
+        if [ -f $TOPFILE ] ; then
+            # The following circumlocution (repeated below as well) ensures
+            # that we record the true directory name and not one that is
+            # faked up with symlink names.
+            PWD= /bin/pwd
+        else
+            local HERE=$PWD
+            local T=
+            while [ \( ! \( -f $TOPFILE \) \) -a \( "$PWD" != "/" \) ]; do
+                \cd ..
+                T=`PWD= /bin/pwd -P`
+            done
+            \cd "$HERE"
+            if [ -f "$T/$TOPFILE" ]; then
+                echo "$T"
+            fi
+        fi
+    fi
+}
+
+# Sets TOP, or if the root of the tree can't be found, prints a message and
+# exits.  Since this function exits, it should not be called from functions
+# defined in envsetup.sh.
+if [ -z "${IMPORTING_ENVSETUP:-}" ] ; then
+function require_top
+{
+    TOP=$(gettop)
+    if [[ ! $TOP ]] ; then
+        echo "Can not locate root of source tree. $(basename $0) must be run from within the Android source tree." >&2
+        exit 1
+    fi
+}
+fi
+
+function getoutdir
+{
+    local top=$(gettop)
+    local out_dir="${OUT_DIR:-}"
+    if [[ -z "${out_dir}" ]]; then
+        if [[ -n "${OUT_DIR_COMMON_BASE:-}" && -n "${top}" ]]; then
+            out_dir="${OUT_DIR_COMMON_BASE}/$(basename ${top})"
+        else
+            out_dir="out"
+        fi
+    fi
+    if [[ "${out_dir}" != /* ]]; then
+        out_dir="${top}/${out_dir}"
+    fi
+    echo "${out_dir}"
+}
+
+
diff --git a/target/OWNERS b/target/OWNERS
deleted file mode 100644
index feb2742..0000000
--- a/target/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-hansson@google.com
diff --git a/target/board/BoardConfigEmuCommon.mk b/target/board/BoardConfigEmuCommon.mk
index f6e64a1..6ed08f0 100644
--- a/target/board/BoardConfigEmuCommon.mk
+++ b/target/board/BoardConfigEmuCommon.mk
@@ -26,51 +26,34 @@
 # Emulator doesn't support sparse image format.
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
 
-ifeq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
-  # emulator is Non-A/B device
-  AB_OTA_UPDATER := false
+# emulator is Non-A/B device
+AB_OTA_UPDATER := false
 
-  # emulator needs super.img
-  BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT := true
+# emulator needs super.img
+BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT := true
 
-  # 4G + 8M
-  BOARD_SUPER_PARTITION_SIZE := 4303355904
-  BOARD_SUPER_PARTITION_GROUPS := emulator_dynamic_partitions
+# 8G + 8M
+BOARD_SUPER_PARTITION_SIZE ?= 8598323200
+BOARD_SUPER_PARTITION_GROUPS := emulator_dynamic_partitions
 
-  ifeq ($(QEMU_USE_SYSTEM_EXT_PARTITIONS),true)
-    BOARD_EMULATOR_DYNAMIC_PARTITIONS_PARTITION_LIST := \
-        system \
-        system_ext \
-        product \
-        vendor
+BOARD_EMULATOR_DYNAMIC_PARTITIONS_PARTITION_LIST := \
+  system \
+  system_dlkm \
+  system_ext \
+  product \
+  vendor
 
-    TARGET_COPY_OUT_PRODUCT := product
-    BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
-    TARGET_COPY_OUT_SYSTEM_EXT := system_ext
-    BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE := ext4
-  else
-    TARGET_COPY_OUT_PRODUCT := system/product
-    TARGET_COPY_OUT_SYSTEM_EXT := system/system_ext
-    BOARD_EMULATOR_DYNAMIC_PARTITIONS_PARTITION_LIST := \
-        system \
-        vendor
-  endif
+TARGET_COPY_OUT_PRODUCT := product
+BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
+TARGET_COPY_OUT_SYSTEM_EXT := system_ext
+BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE := ext4
 
-  # 4G
-  BOARD_EMULATOR_DYNAMIC_PARTITIONS_SIZE := 4294967296
+BOARD_USES_SYSTEM_DLKMIMAGE := true
+BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE := erofs
+TARGET_COPY_OUT_SYSTEM_DLKM := system_dlkm
 
-  # in build environment to speed up make -j
-  ifeq ($(QEMU_DISABLE_AVB),true)
-    BOARD_AVB_ENABLE := false
-  endif
-else ifeq ($(PRODUCT_USE_DYNAMIC_PARTITION_SIZE),true)
-  # Enable dynamic system image size and reserved 64MB in it.
-  BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE := 67108864
-  BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE := 67108864
-else
-  BOARD_SYSTEMIMAGE_PARTITION_SIZE := 3221225472
-  BOARD_VENDORIMAGE_PARTITION_SIZE := 146800640
-endif
+# 8G
+BOARD_EMULATOR_DYNAMIC_PARTITIONS_SIZE ?= 8589934592
 
 #vendor boot
 BOARD_INCLUDE_DTB_IN_BOOTIMG := false
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 8c634f6..4d95b33 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -3,6 +3,8 @@
 # Common compile-time definitions for GSI
 # Builds upon the mainline config.
 #
+# See device/generic/common/README.md for more details.
+#
 
 include build/make/target/board/BoardConfigMainlineCommon.mk
 
@@ -17,6 +19,12 @@
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
 TARGET_USERIMAGES_SPARSE_EROFS_DISABLED := true
 
+# Enable system_dlkm image for creating a symlink in GSI to support
+# devices with a system_dlkm partition
+BOARD_USES_SYSTEM_DLKMIMAGE := true
+BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE := ext4
+TARGET_COPY_OUT_SYSTEM_DLKM := system_dlkm
+
 # GSI also includes make_f2fs to support userdata partition in f2fs
 # for some devices
 TARGET_USERIMAGES_USE_F2FS := true
diff --git a/target/board/generic_64bitonly_x86_64/BoardConfig.mk b/target/board/generic_64bitonly_x86_64/BoardConfig.mk
index 71c4357..a240eab 100644
--- a/target/board/generic_64bitonly_x86_64/BoardConfig.mk
+++ b/target/board/generic_64bitonly_x86_64/BoardConfig.mk
@@ -26,7 +26,10 @@
 TARGET_2ND_ARCH_VARIANT := x86_64
 
 TARGET_PRELINK_MODULE := false
+
 include build/make/target/board/BoardConfigGsiCommon.mk
+
+ifndef BUILDING_GSI
 include build/make/target/board/BoardConfigEmuCommon.mk
 
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
@@ -43,3 +46,5 @@
 WIFI_DRIVER_FW_PATH_PARAM   := "/dev/null"
 WIFI_DRIVER_FW_PATH_STA     := "/dev/null"
 WIFI_DRIVER_FW_PATH_AP      := "/dev/null"
+
+endif # !BUILDING_GSI
diff --git a/target/board/linux_bionic/BoardConfig.mk b/target/board/linux_bionic/BoardConfig.mk
index 7938bdb..7fca911 100644
--- a/target/board/linux_bionic/BoardConfig.mk
+++ b/target/board/linux_bionic/BoardConfig.mk
@@ -17,7 +17,11 @@
 # (device) target architectures are irrelevant. However, the build system isn't
 # prepared to handle no target architectures at all, so pick something
 # arbitrarily.
-TARGET_ARCH_SUITE := ndk
+TARGET_ARCH := arm
+TARGET_ARCH_VARIANT := armv7-a-neon
+TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := armeabi-v7a
+TARGET_CPU_ABI2 := armeabi
 
 HOST_CROSS_OS := linux_bionic
 HOST_CROSS_ARCH := x86_64
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 585630b..1e0ce19 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -36,6 +36,7 @@
 PRODUCT_MAKEFILES := \
     $(LOCAL_DIR)/aosp_arm64.mk \
     $(LOCAL_DIR)/aosp_arm.mk \
+    $(LOCAL_DIR)/aosp_riscv64.mk \
     $(LOCAL_DIR)/aosp_x86_64.mk \
     $(LOCAL_DIR)/aosp_x86.mk \
     $(LOCAL_DIR)/full.mk \
diff --git a/target/product/OWNERS b/target/product/OWNERS
index 61f7d45..008e4a2 100644
--- a/target/product/OWNERS
+++ b/target/product/OWNERS
@@ -5,6 +5,6 @@
 per-file developer_gsi_keys.mk = file:/target/product/gsi/OWNERS
 
 # Android Go
-per-file go_defaults.mk = gkaiser@google.com, rajekumar@google.com
-per-file go_defaults_512.mk = gkaiser@google.com, rajekumar@google.com
-per-file go_defaults_common.mk = gkaiser@google.com, rajekumar@google.com
+per-file go_defaults.mk = gkaiser@google.com, kushg@google.com, rajekumar@google.com
+per-file go_defaults_512.mk = gkaiser@google.com, kushg@google.com, rajekumar@google.com
+per-file go_defaults_common.mk = gkaiser@google.com, kushg@google.com, rajekumar@google.com
\ No newline at end of file
diff --git a/target/product/aosp_64bitonly_x86_64.mk b/target/product/aosp_64bitonly_x86_64.mk
index 4de4e0c..b8ca3aa 100644
--- a/target/product/aosp_64bitonly_x86_64.mk
+++ b/target/product/aosp_64bitonly_x86_64.mk
@@ -51,7 +51,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
 
diff --git a/target/product/aosp_arm.mk b/target/product/aosp_arm.mk
index 90acc17..5f200aa 100644
--- a/target/product/aosp_arm.mk
+++ b/target/product/aosp_arm.mk
@@ -49,7 +49,7 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/arm32-vendor.mk)
+$(call inherit-product-if-exists, build/make/target/product/ramdisk_stub.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
 
diff --git a/target/product/aosp_riscv64.mk b/target/product/aosp_riscv64.mk
index 518f8b1..0e5d9fe 100644
--- a/target/product/aosp_riscv64.mk
+++ b/target/product/aosp_riscv64.mk
@@ -46,7 +46,6 @@
 #
 # All components inherited here go to vendor image
 #
-#$(call inherit-product-if-exists, device/generic/goldfish/riscv64-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_riscv64/device.mk)
 
@@ -60,6 +59,12 @@
 PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
     root/init.zygote64.rc
 
+# TODO(b/206676167): This property can be removed when renderscript is removed.
+# Prevents framework from attempting to load renderscript libraries, which are
+# not supported on this architecture.
+PRODUCT_SYSTEM_PROPERTIES += \
+    config.disable_renderscript=1 \
+
 # This build configuration supports 64-bit apps only
 PRODUCT_NAME := aosp_riscv64
 PRODUCT_DEVICE := generic_riscv64
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 7db2c0f..669874e 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -47,7 +47,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
 
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index d55866f..deaa3b1 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -56,7 +56,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/non_ab_device.mk)
diff --git a/target/product/aosp_x86_arm.mk b/target/product/aosp_x86_arm.mk
index f96e068..39ad0d8 100644
--- a/target/product/aosp_x86_arm.mk
+++ b/target/product/aosp_x86_arm.mk
@@ -45,7 +45,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_arm/device.mk)
 
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 96d7b2f..0f5b8a4 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -54,7 +54,6 @@
     com.android.appsearch \
     com.android.btservices \
     com.android.conscrypt \
-    com.android.cronet \
     com.android.extservices \
     com.android.i18n \
     com.android.ipsec \
@@ -66,12 +65,14 @@
     com.android.os.statsd \
     com.android.permission \
     com.android.resolv \
+    com.android.rkpd \
     com.android.neuralnetworks \
     com.android.scheduling \
     com.android.sdkext \
     com.android.tethering \
     com.android.tzdata \
     com.android.uwb \
+    com.android.virt \
     com.android.wifi \
     ContactsProvider \
     content \
@@ -121,6 +122,7 @@
     init_system \
     input \
     installd \
+    IntentResolver \
     ip \
     iptables \
     ip-up-vpn \
@@ -202,7 +204,6 @@
     libvulkan \
     libwilhelm \
     linker \
-    linkerconfig \
     llkd \
     lmkd \
     LocalTransport \
@@ -295,11 +296,9 @@
     system_manifest.xml \
     system_compatibility_matrix.xml \
 
-# HWASAN runtime for SANITIZE_TARGET=hwaddress builds
-ifneq (,$(filter hwaddress,$(SANITIZE_TARGET)))
-  PRODUCT_PACKAGES += \
-   libclang_rt.hwasan.bootstrap
-endif
+PRODUCT_PACKAGES_ARM64 := libclang_rt.hwasan \
+ libclang_rt.hwasan.bootstrap \
+ libc_hwasan \
 
 # Jacoco agent JARS to be built and installed, if any.
 ifeq ($(EMMA_INSTRUMENT),true)
@@ -382,11 +381,13 @@
     iotop \
     iperf3 \
     iw \
+    libclang_rt.ubsan_standalone \
     logpersist.start \
     logtagd.rc \
     procrank \
     profcollectd \
     profcollectctl \
+    record_binder \
     servicedispatcher \
     showmap \
     sqlite3 \
diff --git a/target/product/base_vendor.mk b/target/product/base_vendor.mk
index 7fb785c..3c4d62e 100644
--- a/target/product/base_vendor.mk
+++ b/target/product/base_vendor.mk
@@ -46,7 +46,7 @@
 
 # Base modules and settings for the vendor partition.
 PRODUCT_PACKAGES += \
-    android.hardware.cas@1.2-service \
+    android.hardware.cas-service.example \
     boringssl_self_test_vendor \
     dumpsys_vendor \
     fs_config_files_nonsystem \
@@ -81,7 +81,11 @@
 
 endif
 
-# Base module when shipping api level is less than or equal to 29
+# Base modules when shipping api level is less than or equal to 33
+PRODUCT_PACKAGES_SHIPPING_API_LEVEL_33 += \
+    android.hardware.cas@1.2-service \
+
+# Base modules when shipping api level is less than or equal to 29
 PRODUCT_PACKAGES_SHIPPING_API_LEVEL_29 += \
     android.hardware.configstore@1.1-service \
     vndservice \
diff --git a/target/product/cfi-common.mk b/target/product/cfi-common.mk
index 3aa2be7..11c01a2 100644
--- a/target/product/cfi-common.mk
+++ b/target/product/cfi-common.mk
@@ -26,8 +26,10 @@
     frameworks/av/services \
     frameworks/minikin \
     hardware/broadcom/wlan/bcmdhd/wpa_supplicant_8_lib \
+    hardware/synaptics/wlan/synadhd/wpa_supplicant_8_lib \
     hardware/interfaces/nfc \
-    hardware/qcom/wlan/qcwcn/wpa_supplicant_8_lib \
+    hardware/qcom/wlan/legacy/qcwcn/wpa_supplicant_8_lib \
+    hardware/qcom/wlan/wcn6740/qcwcn/wpa_supplicant_8_lib \
     hardware/interfaces/keymaster \
     hardware/interfaces/security \
     packages/modules/Bluetooth/system \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 901302e..752b199 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -70,6 +70,7 @@
     com.android.tethering:framework-connectivity-t \
     com.android.tethering:framework-tethering \
     com.android.uwb:framework-uwb \
+    com.android.virt:framework-virtualization \
     com.android.wifi:framework-wifi \
 
 # List of system_server classpath jars delivered via apex.
@@ -82,6 +83,7 @@
     com.android.art:service-art \
     com.android.media:service-media-s \
     com.android.permission:service-permission \
+    com.android.rkpd:service-rkp \
 
 # Use $(wildcard) to avoid referencing the profile in thin manifests that don't have the
 # art project.
@@ -112,3 +114,5 @@
     dalvik.vm.image-dex2oat-Xmx=64m \
     dalvik.vm.dex2oat-Xms=64m \
     dalvik.vm.dex2oat-Xmx=512m \
+
+PRODUCT_ENABLE_UFFD_GC := false  # TODO(jiakaiz): Change this to "default".
diff --git a/target/product/full.mk b/target/product/full.mk
index 782280d..945957f 100644
--- a/target/product/full.mk
+++ b/target/product/full.mk
@@ -19,7 +19,7 @@
 # build quite specifically for the emulator, and might not be
 # entirely appropriate to inherit from for on-device configurations.
 
-$(call inherit-product-if-exists, device/generic/goldfish/arm32-vendor.mk)
+$(call inherit-product-if-exists, build/make/target/product/ramdisk_stub.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic/device.mk)
diff --git a/target/product/fullmte.mk b/target/product/fullmte.mk
new file mode 100644
index 0000000..d47c685
--- /dev/null
+++ b/target/product/fullmte.mk
@@ -0,0 +1,26 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Enables more comprehensive detection of memory errors on hardware that
+# supports the ARM Memory Tagging Extension (MTE), by building the image with
+# MTE stack instrumentation and forcing MTE on in SYNC mode in all processes.
+# For more details, see:
+# https://source.android.com/docs/security/test/memory-safety/arm-mte
+ifeq ($(filter memtag_heap,$(SANITIZE_TARGET)),)
+  SANITIZE_TARGET := $(strip $(SANITIZE_TARGET) memtag_heap memtag_stack)
+  SANITIZE_TARGET_DIAG := $(strip $(SANITIZE_TARGET_DIAG) memtag_heap)
+endif
+PRODUCT_PRODUCT_PROPERTIES += persist.arm64.memtag.default=sync
diff --git a/target/product/generic_ramdisk.mk b/target/product/generic_ramdisk.mk
index c7dcd60..ebac62f 100644
--- a/target/product/generic_ramdisk.mk
+++ b/target/product/generic_ramdisk.mk
@@ -29,11 +29,16 @@
     adb_debug.prop \
     userdebug_plat_sepolicy.cil \
 
+
+# For targets using a dedicated recovery partition, the generic ramdisk
+# might be relocated to the recovery partition.
 _my_paths := \
     $(TARGET_COPY_OUT_RAMDISK)/ \
     $(TARGET_COPY_OUT_DEBUG_RAMDISK)/ \
     system/usr/share/zoneinfo/tz_version \
     system/usr/share/zoneinfo/tzdata \
+    $(TARGET_COPY_OUT_RECOVERY)/root/first_stage_ramdisk/system \
+
 
 # We use the "relaxed" version here because tzdata / tz_version is only produced
 # by this makefile on a subset of devices.
diff --git a/target/product/go_defaults_common.mk b/target/product/go_defaults_common.mk
index 7f19615..51a1ef6 100644
--- a/target/product/go_defaults_common.mk
+++ b/target/product/go_defaults_common.mk
@@ -36,12 +36,6 @@
 # Do not generate libartd.
 PRODUCT_ART_TARGET_INCLUDE_DEBUG_BUILD := false
 
-# Do not spin up a separate process for the network stack on go devices, use an in-process APK.
-PRODUCT_PACKAGES += InProcessNetworkStack
-PRODUCT_PACKAGES += CellBroadcastAppPlatform
-PRODUCT_PACKAGES += CellBroadcastServiceModulePlatform
-PRODUCT_PACKAGES += com.android.tethering.inprocess
-
 # Strip the local variable table and the local variable type table to reduce
 # the size of the system image. This has no bearing on stack traces, but will
 # leave less information available via JDWP.
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index d02dc7a..107c94f 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -126,8 +126,13 @@
 endef
 
 VNDK_ABI_DUMP_DIR := prebuilts/abi-dumps/vndk/$(PLATFORM_VNDK_VERSION)
-NDK_ABI_DUMP_DIR := prebuilts/abi-dumps/ndk/$(PLATFORM_VNDK_VERSION)
-PLATFORM_ABI_DUMP_DIR := prebuilts/abi-dumps/platform/$(PLATFORM_VNDK_VERSION)
+ifeq (REL,$(PLATFORM_VERSION_CODENAME))
+    NDK_ABI_DUMP_DIR := prebuilts/abi-dumps/ndk/$(PLATFORM_SDK_VERSION)
+    PLATFORM_ABI_DUMP_DIR := prebuilts/abi-dumps/platform/$(PLATFORM_SDK_VERSION)
+else
+    NDK_ABI_DUMP_DIR := prebuilts/abi-dumps/ndk/current
+    PLATFORM_ABI_DUMP_DIR := prebuilts/abi-dumps/platform/current
+endif
 VNDK_ABI_DUMPS := $(call find-abi-dump-paths,$(VNDK_ABI_DUMP_DIR))
 NDK_ABI_DUMPS := $(call find-abi-dump-paths,$(NDK_ABI_DUMP_DIR))
 PLATFORM_ABI_DUMPS := $(call find-abi-dump-paths,$(PLATFORM_ABI_DUMP_DIR))
@@ -141,7 +146,7 @@
 $(check-vndk-abi-dump-list-timestamp): PRIVATE_STUB_LIBRARIES := $(STUB_LIBRARIES)
 $(check-vndk-abi-dump-list-timestamp):
 	$(eval added_vndk_abi_dumps := $(strip $(sort $(filter-out \
-	  $(call filter-abi-dump-paths,LLNDK VNDK-SP VNDK-core,$(PRIVATE_LSDUMP_PATHS)), \
+	  $(call filter-abi-dump-paths,VNDK-SP VNDK-core,$(PRIVATE_LSDUMP_PATHS)), \
 	  $(notdir $(VNDK_ABI_DUMPS))))))
 	$(if $(added_vndk_abi_dumps), \
 	  echo -e "Found unexpected ABI reference dump files under $(VNDK_ABI_DUMP_DIR). It is caused by mismatch between Android.bp and the dump files. Run \`find \$${ANDROID_BUILD_TOP}/$(VNDK_ABI_DUMP_DIR) '(' -name $(subst $(space), -or -name ,$(added_vndk_abi_dumps)) ')' -delete\` to delete the dump files.")
@@ -154,7 +159,7 @@
 	  echo -e "Found unexpected ABI reference dump files under $(NDK_ABI_DUMP_DIR). It is caused by mismatch between Android.bp and the dump files. Run \`find \$${ANDROID_BUILD_TOP}/$(NDK_ABI_DUMP_DIR) '(' -name $(subst $(space), -or -name ,$(added_ndk_abi_dumps)) ')' -delete\` to delete the dump files.")
 
 	$(eval added_platform_abi_dumps := $(strip $(sort $(filter-out \
-	  $(call filter-abi-dump-paths,PLATFORM,$(PRIVATE_LSDUMP_PATHS)) \
+	  $(call filter-abi-dump-paths,LLNDK PLATFORM,$(PRIVATE_LSDUMP_PATHS)) \
 	  $(addsuffix .lsdump,$(PRIVATE_STUB_LIBRARIES)), \
 	  $(notdir $(PLATFORM_ABI_DUMPS))))))
 	$(if $(added_platform_abi_dumps), \
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 94892dc..474cb20 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -7,6 +7,7 @@
 LLNDK: libbinder_ndk.so
 LLNDK: libc.so
 LLNDK: libcgrouprc.so
+LLNDK: libcom.android.tethering.connectivity_native.so
 LLNDK: libdl.so
 LLNDK: libft2.so
 LLNDK: liblog.so
@@ -20,8 +21,7 @@
 LLNDK: libvulkan.so
 VNDK-SP: android.hardware.common-V2-ndk.so
 VNDK-SP: android.hardware.common.fmq-V1-ndk.so
-VNDK-SP: android.hardware.graphics.allocator-V1-ndk.so
-VNDK-SP: android.hardware.graphics.common-V3-ndk.so
+VNDK-SP: android.hardware.graphics.common-V4-ndk.so
 VNDK-SP: android.hardware.graphics.common@1.0.so
 VNDK-SP: android.hardware.graphics.common@1.1.so
 VNDK-SP: android.hardware.graphics.common@1.2.so
@@ -30,6 +30,7 @@
 VNDK-SP: android.hardware.graphics.mapper@2.1.so
 VNDK-SP: android.hardware.graphics.mapper@3.0.so
 VNDK-SP: android.hardware.graphics.mapper@4.0.so
+VNDK-SP: android.hardware.graphics.allocator-V2-ndk.so
 VNDK-SP: android.hardware.renderscript@1.0.so
 VNDK-SP: android.hidl.memory.token@1.0.so
 VNDK-SP: android.hidl.memory@1.0-impl.so
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 20493be..09d4bc9 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -23,6 +23,8 @@
 # - Released GSI contains more VNDK packages to support old version vendors
 # - etc.
 #
+# See device/generic/common/README.md for more details.
+#
 
 BUILDING_GSI := true
 
@@ -83,6 +85,7 @@
 PRODUCT_BUILD_VENDOR_IMAGE := false
 PRODUCT_BUILD_SUPER_PARTITION := false
 PRODUCT_BUILD_SUPER_EMPTY_IMAGE := false
+PRODUCT_BUILD_SYSTEM_DLKM_IMAGE := false
 PRODUCT_EXPORT_BOOT_IMAGE_TO_DIST := true
 
 # Always build modules from source
diff --git a/target/product/module_arm.mk b/target/product/module_arm.mk
index d99dce8..434f7ad 100644
--- a/target/product/module_arm.mk
+++ b/target/product/module_arm.mk
@@ -17,5 +17,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
 
 PRODUCT_NAME := module_arm
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_arm
diff --git a/target/product/module_arm64.mk b/target/product/module_arm64.mk
index fc9529c..2e8c8a7 100644
--- a/target/product/module_arm64.mk
+++ b/target/product/module_arm64.mk
@@ -18,5 +18,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
 
 PRODUCT_NAME := module_arm64
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_arm64
diff --git a/target/product/module_arm64only.mk b/target/product/module_arm64only.mk
index 4e8d53e..c0769bf 100644
--- a/target/product/module_arm64only.mk
+++ b/target/product/module_arm64only.mk
@@ -18,5 +18,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
 
 PRODUCT_NAME := module_arm64only
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_arm64only
diff --git a/target/product/module_common.mk b/target/product/module_common.mk
index 54f3949..84bd799 100644
--- a/target/product/module_common.mk
+++ b/target/product/module_common.mk
@@ -25,3 +25,10 @@
 # Builds using a module product should build modules from source, even if
 # BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE says otherwise.
 PRODUCT_MODULE_BUILD_FROM_SOURCE := true
+
+# Build sdk from source if the branch is not using slim manifests.
+ifneq (,$(strip $(wildcard frameworks/base/Android.bp)))
+  UNBUNDLED_BUILD_SDKS_FROM_SOURCE := true
+endif
+
+PRODUCT_BRAND := Android
diff --git a/target/product/module_x86.mk b/target/product/module_x86.mk
index b852e7a..f38e2b9 100644
--- a/target/product/module_x86.mk
+++ b/target/product/module_x86.mk
@@ -17,5 +17,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
 
 PRODUCT_NAME := module_x86
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_x86
diff --git a/target/product/module_x86_64.mk b/target/product/module_x86_64.mk
index f6bc1fc..20f443a 100644
--- a/target/product/module_x86_64.mk
+++ b/target/product/module_x86_64.mk
@@ -18,5 +18,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
 
 PRODUCT_NAME := module_x86_64
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_x86_64
diff --git a/target/product/module_x86_64only.mk b/target/product/module_x86_64only.mk
index bca4541..b0d72bf 100644
--- a/target/product/module_x86_64only.mk
+++ b/target/product/module_x86_64only.mk
@@ -18,5 +18,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
 
 PRODUCT_NAME := module_x86_64only
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_x86_64only
diff --git a/target/product/ramdisk_stub.mk b/target/product/ramdisk_stub.mk
new file mode 100644
index 0000000..2a0b752
--- /dev/null
+++ b/target/product/ramdisk_stub.mk
@@ -0,0 +1,18 @@
+#
+# Copyright 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+PRODUCT_COPY_FILES += \
+    build/make/target/product/ramdisk_stub.mk:$(TARGET_COPY_OUT_VENDOR_RAMDISK)/nonempty
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index 1ebd4ab..39666ea 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -95,7 +95,6 @@
 # The thermal cutoff value is currently set to THERMAL_STATUS_MODERATE.
 PRODUCT_SYSTEM_PROPERTIES += \
     dalvik.vm.usejit=true \
-    dalvik.vm.usejitprofiles=true \
     dalvik.vm.dexopt.secondary=true \
     dalvik.vm.dexopt.thermal-cutoff=2 \
     dalvik.vm.appimageformat=lz4
@@ -123,6 +122,7 @@
 # without exceptions).
 PRODUCT_SYSTEM_PROPERTIES += \
     pm.dexopt.post-boot?=extract \
+    pm.dexopt.boot-after-mainline-update?=verify \
     pm.dexopt.install?=speed-profile \
     pm.dexopt.install-fast?=skip \
     pm.dexopt.install-bulk?=speed-profile \
@@ -165,3 +165,13 @@
     dalvik.vm.usap_pool_size_max?=3 \
     dalvik.vm.usap_pool_size_min?=1 \
     dalvik.vm.usap_pool_refill_delay_ms?=3000
+
+# Allow dexopt files that are side-effects of already allowlisted files.
+# This is only necessary when ART is prebuilt.
+ifeq (false,$(ART_MODULE_BUILD_FROM_SOURCE))
+  PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
+      system/framework/%.art \
+      system/framework/%.oat \
+      system/framework/%.odex \
+      system/framework/%.vdex
+endif
diff --git a/target/product/sdk_phone_arm64.mk b/target/product/sdk_phone_arm64.mk
index 4203d45..3f81615 100644
--- a/target/product/sdk_phone_arm64.mk
+++ b/target/product/sdk_phone_arm64.mk
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-QEMU_USE_SYSTEM_EXT_PARTITIONS := true
 PRODUCT_USE_DYNAMIC_PARTITIONS := true
 
 # This is a build configuration for a full-featured build of the
diff --git a/target/product/sdk_phone_armv7.mk b/target/product/sdk_phone_armv7.mk
index 6c88b44..48a0e3b 100644
--- a/target/product/sdk_phone_armv7.mk
+++ b/target/product/sdk_phone_armv7.mk
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-QEMU_USE_SYSTEM_EXT_PARTITIONS := true
 PRODUCT_USE_DYNAMIC_PARTITIONS := true
 
 # This is a build configuration for a full-featured build of the
@@ -45,7 +44,7 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/arm32-vendor.mk)
+$(call inherit-product-if-exists, build/make/target/product/ramdisk_stub.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/emulator_arm/device.mk)
 
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index a324e5f..0f8b508 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-QEMU_USE_SYSTEM_EXT_PARTITIONS := true
 PRODUCT_USE_DYNAMIC_PARTITIONS := true
 
 # This is a build configuration for a full-featured build of the
diff --git a/target/product/sdk_phone_x86_64.mk b/target/product/sdk_phone_x86_64.mk
index ff9018d..f5d9028 100644
--- a/target/product/sdk_phone_x86_64.mk
+++ b/target/product/sdk_phone_x86_64.mk
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-QEMU_USE_SYSTEM_EXT_PARTITIONS := true
 PRODUCT_USE_DYNAMIC_PARTITIONS := true
 
 # This is a build configuration for a full-featured build of the
diff --git a/target/product/security/BUILD.bazel b/target/product/security/BUILD.bazel
new file mode 100644
index 0000000..c12be79
--- /dev/null
+++ b/target/product/security/BUILD.bazel
@@ -0,0 +1,8 @@
+filegroup(
+    name = "android_certificate_directory",
+    srcs = glob([
+        "*.pk8",
+        "*.pem",
+    ]),
+    visibility = ["//visibility:public"],
+)
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
index 716c8e0..418aaa4 100644
--- a/target/product/virtual_ab_ota/android_t_baseline.mk
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -12,29 +12,11 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+#
 
-# This file enables baseline features, such as io_uring,
-# userspace merge, etc. But sets compression method to none.
-# This .mk file also removes snapuserd from vendor ramdisk,
-# as T launching devices will have init_boot which has snapuserd
-# in generic ramdisk.
-# T launching devices should include this .mk file, and configure
-# compression algorithm by setting
-# PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD to gz or brotli. Complete
-# set of supported algorithms can be found in
-# system/core/fs_mgr/libsnapshot/cow_writer.cpp
-
-PRODUCT_VIRTUAL_AB_OTA := true
-
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.enabled=true
-
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.userspace.snapshots.enabled=true
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled=true
-PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.xor.enabled=true
-
-PRODUCT_VIRTUAL_AB_COMPRESSION := true
-PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD ?= none
-PRODUCT_PACKAGES += \
-    snapuserd \
-
+# This file should be used only for T launching devices. We maintain
+# this file just for backward compatibility with T launching devices
+# so that their builds don't break.
+#
+# All U+ launching devices should instead use vabc_features.mk.
+$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/vabc_features.mk)
diff --git a/target/product/virtual_ab_ota/compression.mk b/target/product/virtual_ab_ota/compression.mk
index d5bd2a5..dc1ee3e 100644
--- a/target/product/virtual_ab_ota/compression.mk
+++ b/target/product/virtual_ab_ota/compression.mk
@@ -19,6 +19,12 @@
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.userspace.snapshots.enabled=true
 PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.batch_writes=true
+
+# Enabling this property will improve OTA install time,
+# but will use an additional CPU core.
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.threads=true
+
 PRODUCT_VIRTUAL_AB_COMPRESSION := true
 PRODUCT_PACKAGES += \
     snapuserd.vendor_ramdisk \
diff --git a/target/product/virtual_ab_ota/vabc_features.mk b/target/product/virtual_ab_ota/vabc_features.mk
new file mode 100644
index 0000000..874eb9c
--- /dev/null
+++ b/target/product/virtual_ab_ota/vabc_features.mk
@@ -0,0 +1,46 @@
+#
+# Copyright (C) 2022 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# This file enables baseline features, such as io_uring,
+# userspace merge, etc., but sets the compression method to none.
+# This .mk file also removes snapuserd from the vendor ramdisk,
+# as T launching devices will have init_boot, which has snapuserd
+# in the generic ramdisk.
+#
+# T and U launching devices should include this .mk file, and configure
+# compression algorithm by setting
+# PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD to lz4, gz or brotli. Complete
+# set of supported algorithms can be found in
+# system/core/fs_mgr/libsnapshot/cow_writer.cpp
+
+PRODUCT_VIRTUAL_AB_OTA := true
+
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.enabled=true
+
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.enabled=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.userspace.snapshots.enabled=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.io_uring.enabled=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.xor.enabled=true
+PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.batch_writes=true
+
+# Enabling this property will improve OTA install time,
+# but will use an additional CPU core.
+# PRODUCT_VENDOR_PROPERTIES += ro.virtual_ab.compression.threads=true
+
+PRODUCT_VIRTUAL_AB_COMPRESSION := true
+PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD ?= none
+PRODUCT_PACKAGES += \
+    snapuserd \
+
diff --git a/target/product/window_extensions.mk b/target/product/window_extensions.mk
new file mode 100644
index 0000000..5f5431f
--- /dev/null
+++ b/target/product/window_extensions.mk
@@ -0,0 +1,24 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# /system_ext packages
+PRODUCT_PACKAGES += \
+    androidx.window.extensions \
+    androidx.window.sidecar
+
+# properties
+PRODUCT_PRODUCT_PROPERTIES += \
+    persist.wm.extensions.enabled=true
diff --git a/tests/b_tests.sh b/tests/b_tests.sh
index f4e043c..491d762 100755
--- a/tests/b_tests.sh
+++ b/tests/b_tests.sh
@@ -18,12 +18,20 @@
 
 source $(dirname $0)/../envsetup.sh
 
+# lunch is required to set up PATH to use b
+lunch aosp_arm64
+
 test_target=//build/bazel/scripts/difftool:difftool
 
 b build "$test_target"
+b build -- "$test_target"
 b build "$test_target" --run-soong-tests
 b build --run-soong-tests "$test_target"
 b --run-soong-tests build "$test_target"
+# Test that the bazel server can be restarted once shut down. If run in a
+# docker container, you need to run the docker container with --init or
+# have some other process as PID 1 that can reap zombies.
+b shutdown
 b cquery 'kind(test, //build/bazel/examples/android_app/...)' --config=android
 b run $test_target -- --help >/dev/null
 
diff --git a/tests/inherits_in_regular_variables/inherit1.rbc b/tests/inherits_in_regular_variables/inherit1.rbc
new file mode 100644
index 0000000..4ce8825
--- /dev/null
+++ b/tests/inherits_in_regular_variables/inherit1.rbc
@@ -0,0 +1,21 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  cfg.setdefault("PRODUCT_PACKAGES", [])
+  cfg["PRODUCT_PACKAGES"] += ["bar"]
diff --git a/tests/inherits_in_regular_variables/product.rbc b/tests/inherits_in_regular_variables/product.rbc
new file mode 100644
index 0000000..c193c65
--- /dev/null
+++ b/tests/inherits_in_regular_variables/product.rbc
@@ -0,0 +1,27 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load(":inherit1.rbc", _inherit1_init = "init")
+
+def init(g, handle):
+  cfg = rblf.cfg(handle)
+
+  cfg.setdefault("PRODUCT_PACKAGES", [])
+  cfg["PRODUCT_PACKAGES"] += ["foo"]
+
+  g["PRODUCT_PACKAGES_COPY"] = cfg["PRODUCT_PACKAGES"]
+
+  rblf.inherit(handle, "test/inherit1", _inherit1_init)
+
diff --git a/tests/inherits_in_regular_variables/test.rbc b/tests/inherits_in_regular_variables/test.rbc
new file mode 100644
index 0000000..3a76d8a
--- /dev/null
+++ b/tests/inherits_in_regular_variables/test.rbc
@@ -0,0 +1,30 @@
+# Copyright 2023 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      https://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load("//build/make/core:product_config.rbc", "rblf")
+load("//build/make/tests/input_variables.rbc", input_variables_init = "init")
+load(":product.rbc", "init")
+
+
+def assert_eq(expected, actual):
+    if expected != actual:
+        fail("Expected '%s', got '%s'" % (expected, actual))
+
+def test():
+    (globals, globals_base) = rblf.product_configuration("test/device", init, input_variables_init)
+    assert_eq(["foo", "bar"], globals["PRODUCTS.test/device.mk.PRODUCT_PACKAGES"])
+    assert_eq(["foo", ("test/inherit1",)], globals["PRODUCT_PACKAGES_COPY"])
+
+    # Ideally we would check that rblf.printvars returns the correct result, but we don't have
+    # a good way to intercept its output or mock rblf_cli
diff --git a/tests/run.rbc b/tests/run.rbc
index c6dfeba..33583eb 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -28,6 +28,7 @@
 load("//build/make/tests/single_value_inheritance:test.rbc", test_single_value_inheritance = "test")
 load("//build/make/tests/artifact_path_requirements:test.rbc", test_artifact_path_requirements = "test")
 load("//build/make/tests/prefixed_sort_order:test.rbc", test_prefixed_sort_order = "test")
+load("//build/make/tests/inherits_in_regular_variables:test.rbc", test_inherits_in_regular_variables = "test")
 
 def assert_eq(expected, actual):
     if expected != actual:
@@ -45,6 +46,11 @@
 assert_eq("a b c", rblf.mkstrip("  a b   \n  c \t"))
 assert_eq("1", rblf.mkstrip("1 "))
 
+assert_eq(["a", "b"], rblf.words("a b"))
+assert_eq(["a", "b", "c"], rblf.words(["a b", "c"]))
+# 1-tuple like we use in product variables
+assert_eq(["a b", ("c",)], rblf.words(["a b", ("c",)]))
+
 assert_eq("b1 b2", rblf.mksubst("a", "b", "a1 a2"))
 assert_eq(["b1", "x2"], rblf.mksubst("a", "b", ["a1", "x2"]))
 
@@ -162,6 +168,18 @@
 assert_eq({"A_LIST_VARIABLE": ["foo", "bar"]}, board_globals)
 assert_eq({"A_LIST_VARIABLE": ["foo"]}, board_globals_base)
 
+g = {"FOO": "a", "BAR": "c", "BAZ": "e"}
+cfg = {"FOO": "b", "BAR": "d", "BAZ": "f"}
+rblf.clear_var_list(g, struct(cfg=cfg), "FOO BAR NEWVAR")
+assert_eq("", g["FOO"])
+assert_eq("", cfg["FOO"])
+assert_eq("", g["BAR"])
+assert_eq("", cfg["BAR"])
+assert_eq("e", g["BAZ"])
+assert_eq("f", cfg["BAZ"])
+assert_eq("", g.get("NEWVAR"))
+
 test_single_value_inheritance()
 test_artifact_path_requirements()
 test_prefixed_sort_order()
+test_inherits_in_regular_variables()
diff --git a/tools/Android.bp b/tools/Android.bp
index f401058..bea0602 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -59,3 +59,26 @@
   name: "check_radio_versions",
   srcs: ["check_radio_versions.py"],
 }
+
+python_binary_host {
+  name: "check_elf_file",
+  srcs: ["check_elf_file.py"],
+}
+
+python_binary_host {
+  name: "generate_gts_shared_report",
+  srcs: ["generate_gts_shared_report.py"],
+}
+
+python_binary_host {
+    name: "list_files",
+    main: "list_files.py",
+    srcs: [
+        "list_files.py",
+    ],
+    version: {
+      py3: {
+        embedded_launcher: true,
+      }
+    }
+}
diff --git a/tools/atree/fs.cpp b/tools/atree/fs.cpp
index 6cd080e..d004e97 100644
--- a/tools/atree/fs.cpp
+++ b/tools/atree/fs.cpp
@@ -177,7 +177,7 @@
         } else {
             // Split the arguments if more than 1
             char* cmd = strdup(strip_cmd);
-            const char** args = (const char**) malloc(sizeof(const char*) * (num_args + 2));
+            const char** args = (const char**) calloc((num_args + 2), sizeof(const char*));
 
             const char** curr = args;
             char* s = cmd;
diff --git a/tools/auto_gen_test_config.py b/tools/auto_gen_test_config.py
index 943f238..ce64160 100755
--- a/tools/auto_gen_test_config.py
+++ b/tools/auto_gen_test_config.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2017 The Android Open Source Project
 #
@@ -69,7 +69,7 @@
   module = os.path.splitext(os.path.basename(target_config))[0]
   instrumentation = instrumentation_elements[0]
   manifest = manifest_elements[0]
-  if instrumentation.attributes.has_key(ATTRIBUTE_LABEL):
+  if ATTRIBUTE_LABEL in instrumentation.attributes:
     label = instrumentation.attributes[ATTRIBUTE_LABEL].value
   else:
     label = module
diff --git a/tools/check_elf_file.py b/tools/check_elf_file.py
index 0b80226..51ec23b 100755
--- a/tools/check_elf_file.py
+++ b/tools/check_elf_file.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2019 The Android Open Source Project
 #
@@ -196,11 +196,7 @@
   def _read_llvm_readobj(cls, elf_file_path, header, llvm_readobj):
     """Run llvm-readobj and parse the output."""
     cmd = [llvm_readobj, '--dynamic-table', '--dyn-symbols', elf_file_path]
-    proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
-    out, _ = proc.communicate()
-    rc = proc.returncode
-    if rc != 0:
-      raise subprocess.CalledProcessError(rc, cmd, out)
+    out = subprocess.check_output(cmd, text=True)
     lines = out.splitlines()
     return cls._parse_llvm_readobj(elf_file_path, header, lines)
 
@@ -411,8 +407,7 @@
     # Check whether all DT_NEEDED entries are specified.
     for lib in self._file_under_test.dt_needed:
       if lib not in specified_sonames:
-        self._error('DT_NEEDED "{}" is not specified in shared_libs.'
-                    .format(lib.decode('utf-8')))
+        self._error(f'DT_NEEDED "{lib}" is not specified in shared_libs.')
         missing_shared_libs = True
 
     if missing_shared_libs:
@@ -467,7 +462,7 @@
     """Check whether all undefined symbols are resolved to a definition."""
     all_elf_files = [self._file_under_test] + self._shared_libs
     missing_symbols = []
-    for sym, imported_vers in self._file_under_test.imported.iteritems():
+    for sym, imported_vers in self._file_under_test.imported.items():
       for imported_ver in imported_vers:
         lib = self._find_symbol_from_libs(all_elf_files, sym, imported_ver)
         if not lib:
@@ -475,16 +470,14 @@
 
     if missing_symbols:
       for sym, ver in sorted(missing_symbols):
-        sym = sym.decode('utf-8')
         if ver:
-          sym += '@' + ver.decode('utf-8')
-        self._error('Unresolved symbol: {}'.format(sym))
+          sym += '@' + ver
+        self._error(f'Unresolved symbol: {sym}')
 
       self._note()
       self._note('Some dependencies might be changed, thus the symbol(s) '
                  'above cannot be resolved.')
-      self._note('Please re-build the prebuilt file: "{}".'
-                 .format(self._file_path))
+      self._note(f'Please re-build the prebuilt file: "{self._file_path}".')
 
       self._note()
       self._note('If this is a new prebuilt file and it is designed to have '
diff --git a/tools/compliance/Android.bp b/tools/compliance/Android.bp
index 2527df7..ef5c760 100644
--- a/tools/compliance/Android.bp
+++ b/tools/compliance/Android.bp
@@ -131,6 +131,22 @@
     testSrcs: ["cmd/xmlnotice/xmlnotice_test.go"],
 }
 
+blueprint_go_binary {
+    name: "compliance_sbom",
+    srcs: ["cmd/sbom/sbom.go"],
+    deps: [
+        "compliance-module",
+        "blueprint-deptools",
+        "soong-response",
+        "spdx-tools-spdxv2_2",
+        "spdx-tools-builder2v2",
+        "spdx-tools-spdxcommon",
+        "spdx-tools-spdx-json",
+        "spdx-tools-spdxlib",
+    ],
+    testSrcs: ["cmd/sbom/sbom_test.go"],
+}
+
 bootstrap_go_package {
     name: "compliance-module",
     srcs: [
diff --git a/tools/compliance/README.md b/tools/compliance/README.md
new file mode 100644
index 0000000..995d9ca
--- /dev/null
+++ b/tools/compliance/README.md
@@ -0,0 +1,101 @@
+# Compliance
+
+<!-- Much of this content also appears in doc.go.
+When changing this file, consider whether the change also applies to doc.go -->
+
+Package compliance provides an approved means for reading, consuming, and
+analyzing license metadata graphs.
+
+Assuming the license metadata and dependencies are fully and accurately
+recorded in the build system, any discrepancy between the official policy for
+open source license compliance and this code is **a bug in this code.**
+
+## Naming
+
+All of the code that directly reflects a policy decision belongs in a file with
+a name beginning `policy_`. Changes to these files need to be authored or
+reviewed by someone in OSPO or whichever successor group governs policy.
+
+The files with names not beginning `policy_` describe data types, and general,
+reusable algorithms.
+
+The source code for binary tools and utilities appears under the `cmd/`
+subdirectory. Other subdirectories contain reusable components that are not
+`compliance` per se.
+
+## Data Types
+
+A few principal types to understand are LicenseGraph, LicenseCondition, and
+ResolutionSet.
+
+### LicenseGraph
+
+A LicenseGraph is an immutable graph of the targets and dependencies reachable
+from a specific set of root targets. In general, the root targets will be the
+artifacts in a release or distribution. While conceptually immutable, parts of
+the graph may be loaded or evaluated lazily.
+
+Conceptually, the graph itself will always be a directed acyclic graph. One
+representation is a set of directed edges. Another is a set of nodes with
+directed edges to their dependencies.
+
+The edges have annotations, which can distinguish between build tools, runtime
+dependencies, and dependencies like 'contains' that make a derivative work.
+
+### LicenseCondition
+
+A LicenseCondition is an immutable tuple pairing a condition name with an
+originating target. For example, per current policy, a static library under an
+MIT license would pair a "notice" condition with the static library target, and
+a dynamic library licensed under GPL would pair a "restricted" condition with
+the dynamic library target.
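+
+A minimal conceptual sketch of that pairing (these names are illustrative
+only, not the package's exported types):
+
+```go
+// licenseCondition pairs a condition name with the target it originates from.
+type licenseCondition struct {
+	name   string // e.g. "notice", "restricted", "proprietary"
+	origin string // originating target, e.g. "lib/libmit.so.meta_lic"
+}
+```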
+
+### ResolutionSet
+
+A ResolutionSet is an immutable set of `AttachesTo`, `ActsOn`, `Resolves`
+tuples describing how license conditions apply to targets.
+
+`AttachesTo` is the trigger for acting. Distribution of the target invokes
+the policy.
+
+`ActsOn` is the target to share, give notice for, hide etc.
+
+`Resolves` is the set of conditions that the action resolves.
+
+For most condition types, `ActsOn` will be the target where the condition
+originated. For example, a notice condition policy means attribution or notice
+must be given for the target where the condition originates. Likewise, a
+proprietary condition policy means the privacy of the target where the
+condition originates must be respected. i.e. The thing acted on is the origin.
+
+Restricted conditions are different. The infectious nature of restricted often
+means sharing code that is not the target where the restricted condition
+originates. Linking an MIT library to a GPL library implies a policy to share
+the MIT library despite the MIT license having no source sharing requirement.
+
+In this case, one or more resolution tuples will have the MIT library module in
+`ActsOn` and the restricted condition originating at the GPL library module in
+`Resolves`. These tuples will have `AttachesTo` set to every target that depends
+on the GPL library, because shipping any of those targets triggers the policy to
+share the code.
+
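+An illustrative sketch of the tuple shape described above, using the MIT/GPL
+example (names are hypothetical, not the package's real API):
+
+```
+// One resolution tuple: distributing AttachesTo requires acting on ActsOn to
+// resolve the listed conditions.
+type Resolution struct {
+	AttachesTo *TargetNode        // e.g. a binary that links both libraries
+	ActsOn     *TargetNode        // e.g. the MIT library whose code must be shared
+	Resolves   []LicenseCondition // e.g. "restricted" originating at the GPL library
+}
+```
+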
+## Processes
+
+### ReadLicenseGraph
+
+The principal means to ingest license metadata. Given the distribution targets,
+ReadLicenseGraph populates the LicenseGraph for those root targets.
+
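+A minimal usage sketch, assuming the `ReadLicenseGraph(rootFS, stderr, files)`
+shape used by the command-line tools under `cmd/` (treat the exact signature as
+an assumption and check the package source):
+
+```
+package main
+
+import (
+	"fmt"
+	"os"
+
+	"android/soong/tools/compliance"
+)
+
+func main() {
+	// Root targets are the .meta_lic files for the artifacts being distributed.
+	roots := []string{"testdata/firstparty/highest.apex.meta_lic"}
+	lg, err := compliance.ReadLicenseGraph(os.DirFS("."), os.Stderr, roots)
+	if err != nil {
+		fmt.Fprintln(os.Stderr, err)
+		os.Exit(1)
+	}
+	_ = lg // hand the graph to resolution, notice, or SBOM code from here
+}
+```
+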
+### NoticeIndex.IndexLicenseTexts
+
+IndexLicenseTexts reads, deduplicates, and caches license texts for notice
+files. It also reads and caches project metadata for deriving library names.
+
+The algorithm for deriving library names has not been dictated by OSPO policy,
+but reflects a pragmatic attempt to comply with Android policy regarding
+unreleased product names, proprietary partner names, etc.
+
+### projectmetadata.Index.MetadataForProjects
+
+MetadataForProjects reads, deduplicates, and caches project METADATA files used
+for notice library names, and various properties appearing in SBOMs.
diff --git a/tools/compliance/cmd/checkshare/checkshare_test.go b/tools/compliance/cmd/checkshare/checkshare_test.go
index fdcab29..079691c 100644
--- a/tools/compliance/cmd/checkshare/checkshare_test.go
+++ b/tools/compliance/cmd/checkshare/checkshare_test.go
@@ -241,6 +241,59 @@
 			roots:          []string{"lib/libd.so.meta_lic"},
 			expectedStdout: "PASS",
 		},
+		{
+			condition:      "regressconcur",
+			name:           "container",
+			roots:          []string{"container.zip.meta_lic"},
+			expectedStdout: "FAIL",
+			expectedOutcomes: outcomeList{
+				&outcome{
+					target:           "testdata/regressconcur/bin/bin1.meta_lic",
+					privacyCondition: "proprietary",
+					shareCondition:   "restricted",
+				},
+				&outcome{
+					target:           "testdata/regressconcur/bin/bin2.meta_lic",
+					privacyCondition: "proprietary",
+					shareCondition:   "restricted",
+				},
+				&outcome{
+					target:           "testdata/regressconcur/bin/bin3.meta_lic",
+					privacyCondition: "proprietary",
+					shareCondition:   "restricted",
+				},
+				&outcome{
+					target:           "testdata/regressconcur/bin/bin4.meta_lic",
+					privacyCondition: "proprietary",
+					shareCondition:   "restricted",
+				},
+				&outcome{
+					target:           "testdata/regressconcur/bin/bin5.meta_lic",
+					privacyCondition: "proprietary",
+					shareCondition:   "restricted",
+				},
+				&outcome{
+					target:           "testdata/regressconcur/bin/bin6.meta_lic",
+					privacyCondition: "proprietary",
+					shareCondition:   "restricted",
+				},
+				&outcome{
+					target:           "testdata/regressconcur/bin/bin7.meta_lic",
+					privacyCondition: "proprietary",
+					shareCondition:   "restricted",
+				},
+				&outcome{
+					target:           "testdata/regressconcur/bin/bin8.meta_lic",
+					privacyCondition: "proprietary",
+					shareCondition:   "restricted",
+				},
+				&outcome{
+					target:           "testdata/regressconcur/bin/bin9.meta_lic",
+					privacyCondition: "proprietary",
+					shareCondition:   "restricted",
+				},
+			},
+		},
 	}
 	for _, tt := range tests {
 		t.Run(tt.condition+" "+tt.name, func(t *testing.T) {
diff --git a/tools/compliance/cmd/dumpgraph/dumpgraph_test.go b/tools/compliance/cmd/dumpgraph/dumpgraph_test.go
index d1deed3..e2d0db0 100644
--- a/tools/compliance/cmd/dumpgraph/dumpgraph_test.go
+++ b/tools/compliance/cmd/dumpgraph/dumpgraph_test.go
@@ -341,13 +341,13 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx:       context{stripPrefix: []string{"testdata/restricted/"}, labelConditions: true},
 			expectedOut: []string{
-				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:restricted_allows_dynamic_linking static",
+				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:restricted_if_statically_linked static",
 				"bin/bin1.meta_lic:notice lib/libc.a.meta_lic:reciprocal static",
 				"bin/bin2.meta_lic:notice lib/libb.so.meta_lic:restricted dynamic",
 				"bin/bin2.meta_lic:notice lib/libd.so.meta_lic:notice dynamic",
 				"highest.apex.meta_lic:notice bin/bin1.meta_lic:notice static",
 				"highest.apex.meta_lic:notice bin/bin2.meta_lic:notice static",
-				"highest.apex.meta_lic:notice lib/liba.so.meta_lic:restricted_allows_dynamic_linking static",
+				"highest.apex.meta_lic:notice lib/liba.so.meta_lic:restricted_if_statically_linked static",
 				"highest.apex.meta_lic:notice lib/libb.so.meta_lic:restricted static",
 			},
 		},
@@ -1011,7 +1011,7 @@
 				matchTarget("bin/bin1.meta_lic", "notice"),
 				matchTarget("bin/bin2.meta_lic", "notice"),
 				matchTarget("highest.apex.meta_lic", "notice"),
-				matchTarget("lib/liba.so.meta_lic", "restricted_allows_dynamic_linking"),
+				matchTarget("lib/liba.so.meta_lic", "restricted_if_statically_linked"),
 				matchTarget("lib/libb.so.meta_lic", "restricted"),
 				matchTarget("lib/libc.a.meta_lic", "reciprocal"),
 				matchTarget("lib/libd.so.meta_lic", "notice"),
diff --git a/tools/compliance/cmd/dumpresolutions/dumpresolutions_test.go b/tools/compliance/cmd/dumpresolutions/dumpresolutions_test.go
index 63fd157..227942b 100644
--- a/tools/compliance/cmd/dumpresolutions/dumpresolutions_test.go
+++ b/tools/compliance/cmd/dumpresolutions/dumpresolutions_test.go
@@ -529,18 +529,18 @@
 			name:      "apex",
 			roots:     []string{"highest.apex.meta_lic"},
 			expectedOut: []string{
-				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/bin/bin1.meta_lic notice:restricted_allows_dynamic_linking",
-				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking",
-				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/libc.a.meta_lic reciprocal:restricted_allows_dynamic_linking",
+				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/bin/bin1.meta_lic notice:restricted_if_statically_linked",
+				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_if_statically_linked",
+				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/libc.a.meta_lic reciprocal:restricted_if_statically_linked",
 				"testdata/restricted/bin/bin2.meta_lic testdata/restricted/bin/bin2.meta_lic notice:restricted",
 				"testdata/restricted/bin/bin2.meta_lic testdata/restricted/lib/libb.so.meta_lic restricted",
-				"testdata/restricted/highest.apex.meta_lic testdata/restricted/bin/bin1.meta_lic notice:restricted_allows_dynamic_linking",
+				"testdata/restricted/highest.apex.meta_lic testdata/restricted/bin/bin1.meta_lic notice:restricted_if_statically_linked",
 				"testdata/restricted/highest.apex.meta_lic testdata/restricted/bin/bin2.meta_lic notice:restricted",
-				"testdata/restricted/highest.apex.meta_lic testdata/restricted/highest.apex.meta_lic notice:restricted:restricted_allows_dynamic_linking",
-				"testdata/restricted/highest.apex.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"testdata/restricted/highest.apex.meta_lic testdata/restricted/highest.apex.meta_lic notice:restricted:restricted_if_statically_linked",
+				"testdata/restricted/highest.apex.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_if_statically_linked",
 				"testdata/restricted/highest.apex.meta_lic testdata/restricted/lib/libb.so.meta_lic restricted",
-				"testdata/restricted/highest.apex.meta_lic testdata/restricted/lib/libc.a.meta_lic reciprocal:restricted_allows_dynamic_linking",
-				"testdata/restricted/lib/liba.so.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"testdata/restricted/highest.apex.meta_lic testdata/restricted/lib/libc.a.meta_lic reciprocal:restricted_if_statically_linked",
+				"testdata/restricted/lib/liba.so.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_if_statically_linked",
 				"testdata/restricted/lib/libb.so.meta_lic testdata/restricted/lib/libb.so.meta_lic restricted",
 			},
 		},
@@ -550,18 +550,18 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx:       context{stripPrefix: []string{"testdata/restricted/"}},
 			expectedOut: []string{
-				"bin/bin1.meta_lic bin/bin1.meta_lic notice:restricted_allows_dynamic_linking",
-				"bin/bin1.meta_lic lib/liba.so.meta_lic restricted_allows_dynamic_linking",
-				"bin/bin1.meta_lic lib/libc.a.meta_lic reciprocal:restricted_allows_dynamic_linking",
+				"bin/bin1.meta_lic bin/bin1.meta_lic notice:restricted_if_statically_linked",
+				"bin/bin1.meta_lic lib/liba.so.meta_lic restricted_if_statically_linked",
+				"bin/bin1.meta_lic lib/libc.a.meta_lic reciprocal:restricted_if_statically_linked",
 				"bin/bin2.meta_lic bin/bin2.meta_lic notice:restricted",
 				"bin/bin2.meta_lic lib/libb.so.meta_lic restricted",
-				"highest.apex.meta_lic bin/bin1.meta_lic notice:restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic bin/bin1.meta_lic notice:restricted_if_statically_linked",
 				"highest.apex.meta_lic bin/bin2.meta_lic notice:restricted",
-				"highest.apex.meta_lic highest.apex.meta_lic notice:restricted:restricted_allows_dynamic_linking",
-				"highest.apex.meta_lic lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic highest.apex.meta_lic notice:restricted:restricted_if_statically_linked",
+				"highest.apex.meta_lic lib/liba.so.meta_lic restricted_if_statically_linked",
 				"highest.apex.meta_lic lib/libb.so.meta_lic restricted",
-				"highest.apex.meta_lic lib/libc.a.meta_lic reciprocal:restricted_allows_dynamic_linking",
-				"lib/liba.so.meta_lic lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic lib/libc.a.meta_lic reciprocal:restricted_if_statically_linked",
+				"lib/liba.so.meta_lic lib/liba.so.meta_lic restricted_if_statically_linked",
 				"lib/libb.so.meta_lic lib/libb.so.meta_lic restricted",
 			},
 		},
@@ -590,18 +590,18 @@
 				stripPrefix: []string{"testdata/restricted/"},
 			},
 			expectedOut: []string{
-				"bin/bin1.meta_lic bin/bin1.meta_lic restricted_allows_dynamic_linking",
-				"bin/bin1.meta_lic lib/liba.so.meta_lic restricted_allows_dynamic_linking",
-				"bin/bin1.meta_lic lib/libc.a.meta_lic reciprocal:restricted_allows_dynamic_linking",
+				"bin/bin1.meta_lic bin/bin1.meta_lic restricted_if_statically_linked",
+				"bin/bin1.meta_lic lib/liba.so.meta_lic restricted_if_statically_linked",
+				"bin/bin1.meta_lic lib/libc.a.meta_lic reciprocal:restricted_if_statically_linked",
 				"bin/bin2.meta_lic bin/bin2.meta_lic restricted",
 				"bin/bin2.meta_lic lib/libb.so.meta_lic restricted",
-				"highest.apex.meta_lic bin/bin1.meta_lic restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic bin/bin1.meta_lic restricted_if_statically_linked",
 				"highest.apex.meta_lic bin/bin2.meta_lic restricted",
-				"highest.apex.meta_lic highest.apex.meta_lic restricted:restricted_allows_dynamic_linking",
-				"highest.apex.meta_lic lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic highest.apex.meta_lic restricted:restricted_if_statically_linked",
+				"highest.apex.meta_lic lib/liba.so.meta_lic restricted_if_statically_linked",
 				"highest.apex.meta_lic lib/libb.so.meta_lic restricted",
-				"highest.apex.meta_lic lib/libc.a.meta_lic reciprocal:restricted_allows_dynamic_linking",
-				"lib/liba.so.meta_lic lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic lib/libc.a.meta_lic reciprocal:restricted_if_statically_linked",
+				"lib/liba.so.meta_lic lib/liba.so.meta_lic restricted_if_statically_linked",
 				"lib/libb.so.meta_lic lib/libb.so.meta_lic restricted",
 			},
 		},
@@ -624,18 +624,18 @@
 				stripPrefix: []string{"testdata/restricted/"},
 			},
 			expectedOut: []string{
-				"bin/bin1.meta_lic bin/bin1.meta_lic restricted_allows_dynamic_linking",
-				"bin/bin1.meta_lic lib/liba.so.meta_lic restricted_allows_dynamic_linking",
-				"bin/bin1.meta_lic lib/libc.a.meta_lic reciprocal:restricted_allows_dynamic_linking",
+				"bin/bin1.meta_lic bin/bin1.meta_lic restricted_if_statically_linked",
+				"bin/bin1.meta_lic lib/liba.so.meta_lic restricted_if_statically_linked",
+				"bin/bin1.meta_lic lib/libc.a.meta_lic reciprocal:restricted_if_statically_linked",
 				"bin/bin2.meta_lic bin/bin2.meta_lic restricted",
 				"bin/bin2.meta_lic lib/libb.so.meta_lic restricted",
-				"highest.apex.meta_lic bin/bin1.meta_lic restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic bin/bin1.meta_lic restricted_if_statically_linked",
 				"highest.apex.meta_lic bin/bin2.meta_lic restricted",
-				"highest.apex.meta_lic highest.apex.meta_lic restricted:restricted_allows_dynamic_linking",
-				"highest.apex.meta_lic lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic highest.apex.meta_lic restricted:restricted_if_statically_linked",
+				"highest.apex.meta_lic lib/liba.so.meta_lic restricted_if_statically_linked",
 				"highest.apex.meta_lic lib/libb.so.meta_lic restricted",
-				"highest.apex.meta_lic lib/libc.a.meta_lic reciprocal:restricted_allows_dynamic_linking",
-				"lib/liba.so.meta_lic lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic lib/libc.a.meta_lic reciprocal:restricted_if_statically_linked",
+				"lib/liba.so.meta_lic lib/liba.so.meta_lic restricted_if_statically_linked",
 				"lib/libb.so.meta_lic lib/libb.so.meta_lic restricted",
 			},
 		},
@@ -645,18 +645,18 @@
 			roots:     []string{"highest.apex.meta_lic"},
 			ctx:       context{stripPrefix: []string{"testdata/restricted/"}, labelConditions: true},
 			expectedOut: []string{
-				"bin/bin1.meta_lic:notice bin/bin1.meta_lic:notice notice:restricted_allows_dynamic_linking",
-				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:restricted_allows_dynamic_linking restricted_allows_dynamic_linking",
-				"bin/bin1.meta_lic:notice lib/libc.a.meta_lic:reciprocal reciprocal:restricted_allows_dynamic_linking",
+				"bin/bin1.meta_lic:notice bin/bin1.meta_lic:notice notice:restricted_if_statically_linked",
+				"bin/bin1.meta_lic:notice lib/liba.so.meta_lic:restricted_if_statically_linked restricted_if_statically_linked",
+				"bin/bin1.meta_lic:notice lib/libc.a.meta_lic:reciprocal reciprocal:restricted_if_statically_linked",
 				"bin/bin2.meta_lic:notice bin/bin2.meta_lic:notice notice:restricted",
 				"bin/bin2.meta_lic:notice lib/libb.so.meta_lic:restricted restricted",
-				"highest.apex.meta_lic:notice bin/bin1.meta_lic:notice notice:restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic:notice bin/bin1.meta_lic:notice notice:restricted_if_statically_linked",
 				"highest.apex.meta_lic:notice bin/bin2.meta_lic:notice notice:restricted",
-				"highest.apex.meta_lic:notice highest.apex.meta_lic:notice notice:restricted:restricted_allows_dynamic_linking",
-				"highest.apex.meta_lic:notice lib/liba.so.meta_lic:restricted_allows_dynamic_linking restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic:notice highest.apex.meta_lic:notice notice:restricted:restricted_if_statically_linked",
+				"highest.apex.meta_lic:notice lib/liba.so.meta_lic:restricted_if_statically_linked restricted_if_statically_linked",
 				"highest.apex.meta_lic:notice lib/libb.so.meta_lic:restricted restricted",
-				"highest.apex.meta_lic:notice lib/libc.a.meta_lic:reciprocal reciprocal:restricted_allows_dynamic_linking",
-				"lib/liba.so.meta_lic:restricted_allows_dynamic_linking lib/liba.so.meta_lic:restricted_allows_dynamic_linking restricted_allows_dynamic_linking",
+				"highest.apex.meta_lic:notice lib/libc.a.meta_lic:reciprocal reciprocal:restricted_if_statically_linked",
+				"lib/liba.so.meta_lic:restricted_if_statically_linked lib/liba.so.meta_lic:restricted_if_statically_linked restricted_if_statically_linked",
 				"lib/libb.so.meta_lic:restricted lib/libb.so.meta_lic:restricted restricted",
 			},
 		},
@@ -665,18 +665,18 @@
 			name:      "container",
 			roots:     []string{"container.zip.meta_lic"},
 			expectedOut: []string{
-				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/bin/bin1.meta_lic notice:restricted_allows_dynamic_linking",
-				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking",
-				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/libc.a.meta_lic reciprocal:restricted_allows_dynamic_linking",
+				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/bin/bin1.meta_lic notice:restricted_if_statically_linked",
+				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_if_statically_linked",
+				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/libc.a.meta_lic reciprocal:restricted_if_statically_linked",
 				"testdata/restricted/bin/bin2.meta_lic testdata/restricted/bin/bin2.meta_lic notice:restricted",
 				"testdata/restricted/bin/bin2.meta_lic testdata/restricted/lib/libb.so.meta_lic restricted",
-				"testdata/restricted/container.zip.meta_lic testdata/restricted/bin/bin1.meta_lic notice:restricted_allows_dynamic_linking",
+				"testdata/restricted/container.zip.meta_lic testdata/restricted/bin/bin1.meta_lic notice:restricted_if_statically_linked",
 				"testdata/restricted/container.zip.meta_lic testdata/restricted/bin/bin2.meta_lic notice:restricted",
-				"testdata/restricted/container.zip.meta_lic testdata/restricted/container.zip.meta_lic notice:restricted:restricted_allows_dynamic_linking",
-				"testdata/restricted/container.zip.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"testdata/restricted/container.zip.meta_lic testdata/restricted/container.zip.meta_lic notice:restricted:restricted_if_statically_linked",
+				"testdata/restricted/container.zip.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_if_statically_linked",
 				"testdata/restricted/container.zip.meta_lic testdata/restricted/lib/libb.so.meta_lic restricted",
-				"testdata/restricted/container.zip.meta_lic testdata/restricted/lib/libc.a.meta_lic reciprocal:restricted_allows_dynamic_linking",
-				"testdata/restricted/lib/liba.so.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"testdata/restricted/container.zip.meta_lic testdata/restricted/lib/libc.a.meta_lic reciprocal:restricted_if_statically_linked",
+				"testdata/restricted/lib/liba.so.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_if_statically_linked",
 				"testdata/restricted/lib/libb.so.meta_lic testdata/restricted/lib/libb.so.meta_lic restricted",
 			},
 		},
@@ -685,8 +685,8 @@
 			name:      "application",
 			roots:     []string{"application.meta_lic"},
 			expectedOut: []string{
-				"testdata/restricted/application.meta_lic testdata/restricted/application.meta_lic notice:restricted:restricted_allows_dynamic_linking",
-				"testdata/restricted/application.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted:restricted_allows_dynamic_linking",
+				"testdata/restricted/application.meta_lic testdata/restricted/application.meta_lic notice:restricted:restricted_if_statically_linked",
+				"testdata/restricted/application.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted:restricted_if_statically_linked",
 			},
 		},
 		{
@@ -694,9 +694,9 @@
 			name:      "binary",
 			roots:     []string{"bin/bin1.meta_lic"},
 			expectedOut: []string{
-				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/bin/bin1.meta_lic notice:restricted_allows_dynamic_linking",
-				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking",
-				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/libc.a.meta_lic reciprocal:restricted_allows_dynamic_linking",
+				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/bin/bin1.meta_lic notice:restricted_if_statically_linked",
+				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/liba.so.meta_lic restricted_if_statically_linked",
+				"testdata/restricted/bin/bin1.meta_lic testdata/restricted/lib/libc.a.meta_lic reciprocal:restricted_if_statically_linked",
 			},
 		},
 		{
@@ -2235,17 +2235,17 @@
 				matchResolution(
 					"testdata/restricted/bin/bin1.meta_lic",
 					"testdata/restricted/bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"testdata/restricted/bin/bin1.meta_lic",
 					"testdata/restricted/lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"testdata/restricted/bin/bin1.meta_lic",
 					"testdata/restricted/lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"testdata/restricted/bin/bin2.meta_lic",
 					"testdata/restricted/bin/bin2.meta_lic",
@@ -2258,7 +2258,7 @@
 				matchResolution(
 					"testdata/restricted/highest.apex.meta_lic",
 					"testdata/restricted/bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"testdata/restricted/highest.apex.meta_lic",
@@ -2269,12 +2269,12 @@
 					"testdata/restricted/highest.apex.meta_lic",
 					"testdata/restricted/highest.apex.meta_lic",
 					"restricted",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"testdata/restricted/highest.apex.meta_lic",
 					"testdata/restricted/lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"testdata/restricted/highest.apex.meta_lic",
 					"testdata/restricted/lib/libb.so.meta_lic",
@@ -2283,11 +2283,11 @@
 					"testdata/restricted/highest.apex.meta_lic",
 					"testdata/restricted/lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"testdata/restricted/lib/liba.so.meta_lic",
 					"testdata/restricted/lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"testdata/restricted/lib/libb.so.meta_lic",
 					"testdata/restricted/lib/libb.so.meta_lic",
@@ -2309,17 +2309,17 @@
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"bin/bin2.meta_lic",
 					"bin/bin2.meta_lic",
@@ -2332,7 +2332,7 @@
 				matchResolution(
 					"highest.apex.meta_lic",
 					"bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"highest.apex.meta_lic",
@@ -2343,12 +2343,12 @@
 					"highest.apex.meta_lic",
 					"highest.apex.meta_lic",
 					"restricted",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"highest.apex.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"highest.apex.meta_lic",
 					"lib/libb.so.meta_lic",
@@ -2357,11 +2357,11 @@
 					"highest.apex.meta_lic",
 					"lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"lib/liba.so.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"lib/libb.so.meta_lic",
 					"lib/libb.so.meta_lic",
@@ -2420,16 +2420,16 @@
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"bin/bin2.meta_lic",
 					"bin/bin2.meta_lic",
@@ -2441,7 +2441,7 @@
 				matchResolution(
 					"highest.apex.meta_lic",
 					"bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"highest.apex.meta_lic",
 					"bin/bin2.meta_lic",
@@ -2450,11 +2450,11 @@
 					"highest.apex.meta_lic",
 					"highest.apex.meta_lic",
 					"restricted",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"highest.apex.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"highest.apex.meta_lic",
 					"lib/libb.so.meta_lic",
@@ -2463,11 +2463,11 @@
 					"highest.apex.meta_lic",
 					"lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"lib/liba.so.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"lib/libb.so.meta_lic",
 					"lib/libb.so.meta_lic",
@@ -2502,16 +2502,16 @@
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"bin/bin2.meta_lic",
 					"bin/bin2.meta_lic",
@@ -2523,7 +2523,7 @@
 				matchResolution(
 					"highest.apex.meta_lic",
 					"bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"highest.apex.meta_lic",
 					"bin/bin2.meta_lic",
@@ -2532,11 +2532,11 @@
 					"highest.apex.meta_lic",
 					"highest.apex.meta_lic",
 					"restricted",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"highest.apex.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"highest.apex.meta_lic",
 					"lib/libb.so.meta_lic",
@@ -2545,11 +2545,11 @@
 					"highest.apex.meta_lic",
 					"lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"lib/liba.so.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"lib/libb.so.meta_lic",
 					"lib/libb.so.meta_lic",
@@ -2563,7 +2563,7 @@
 			ctx:       context{stripPrefix: []string{"testdata/restricted/"}, labelConditions: true},
 			expectedOut: []getMatcher{
 				matchTarget("bin/bin1.meta_lic", "notice"),
-				matchTarget("lib/liba.so.meta_lic", "restricted_allows_dynamic_linking"),
+				matchTarget("lib/liba.so.meta_lic", "restricted_if_statically_linked"),
 				matchTarget("lib/libc.a.meta_lic", "reciprocal"),
 				matchTarget("bin/bin2.meta_lic", "notice"),
 				matchTarget("lib/libb.so.meta_lic", "restricted"),
@@ -2571,17 +2571,17 @@
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"bin/bin1.meta_lic",
 					"lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"bin/bin2.meta_lic",
 					"bin/bin2.meta_lic",
@@ -2594,7 +2594,7 @@
 				matchResolution(
 					"highest.apex.meta_lic",
 					"bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"highest.apex.meta_lic",
@@ -2605,12 +2605,12 @@
 					"highest.apex.meta_lic",
 					"highest.apex.meta_lic",
 					"restricted",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"highest.apex.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"highest.apex.meta_lic",
 					"lib/libb.so.meta_lic",
@@ -2619,11 +2619,11 @@
 					"highest.apex.meta_lic",
 					"lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"lib/liba.so.meta_lic",
 					"lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"lib/libb.so.meta_lic",
 					"lib/libb.so.meta_lic",
@@ -2644,17 +2644,17 @@
 				matchResolution(
 					"testdata/restricted/bin/bin1.meta_lic",
 					"testdata/restricted/bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"testdata/restricted/bin/bin1.meta_lic",
 					"testdata/restricted/lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"testdata/restricted/bin/bin1.meta_lic",
 					"testdata/restricted/lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"testdata/restricted/bin/bin2.meta_lic",
 					"testdata/restricted/bin/bin2.meta_lic",
@@ -2667,7 +2667,7 @@
 				matchResolution(
 					"testdata/restricted/container.zip.meta_lic",
 					"testdata/restricted/bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"testdata/restricted/container.zip.meta_lic",
@@ -2678,12 +2678,12 @@
 					"testdata/restricted/container.zip.meta_lic",
 					"testdata/restricted/container.zip.meta_lic",
 					"restricted",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"testdata/restricted/container.zip.meta_lic",
 					"testdata/restricted/lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"testdata/restricted/container.zip.meta_lic",
 					"testdata/restricted/lib/libb.so.meta_lic",
@@ -2692,11 +2692,11 @@
 					"testdata/restricted/container.zip.meta_lic",
 					"testdata/restricted/lib/libc.a.meta_lic",
 					"reciprocal",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"testdata/restricted/lib/liba.so.meta_lic",
 					"testdata/restricted/lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"testdata/restricted/lib/libb.so.meta_lic",
 					"testdata/restricted/lib/libb.so.meta_lic",
@@ -2714,12 +2714,12 @@
 					"testdata/restricted/application.meta_lic",
 					"testdata/restricted/application.meta_lic",
 					"restricted",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"testdata/restricted/application.meta_lic",
 					"testdata/restricted/lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"restricted"),
 			},
 		},
@@ -2734,16 +2734,16 @@
 				matchResolution(
 					"testdata/restricted/bin/bin1.meta_lic",
 					"testdata/restricted/bin/bin1.meta_lic",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"notice"),
 				matchResolution(
 					"testdata/restricted/bin/bin1.meta_lic",
 					"testdata/restricted/lib/liba.so.meta_lic",
-					"restricted_allows_dynamic_linking"),
+					"restricted_if_statically_linked"),
 				matchResolution(
 					"testdata/restricted/bin/bin1.meta_lic",
 					"testdata/restricted/lib/libc.a.meta_lic",
-					"restricted_allows_dynamic_linking",
+					"restricted_if_statically_linked",
 					"reciprocal"),
 			},
 		},
diff --git a/tools/compliance/cmd/htmlnotice/htmlnotice.go b/tools/compliance/cmd/htmlnotice/htmlnotice.go
index 1a49610..78371ee 100644
--- a/tools/compliance/cmd/htmlnotice/htmlnotice.go
+++ b/tools/compliance/cmd/htmlnotice/htmlnotice.go
@@ -24,6 +24,7 @@
 	"io/fs"
 	"os"
 	"path/filepath"
+	"sort"
 	"strings"
 
 	"android/soong/response"
@@ -275,7 +276,8 @@
 	}
 	fmt.Fprintln(ctx.stdout, "</body></html>")
 
-	*ctx.deps = ni.InputNoticeFiles()
+	*ctx.deps = ni.InputFiles()
+	sort.Strings(*ctx.deps)
 
 	return nil
 }
diff --git a/tools/compliance/cmd/htmlnotice/htmlnotice_test.go b/tools/compliance/cmd/htmlnotice/htmlnotice_test.go
index b927018..8dc1197 100644
--- a/tools/compliance/cmd/htmlnotice/htmlnotice_test.go
+++ b/tools/compliance/cmd/htmlnotice/htmlnotice_test.go
@@ -78,7 +78,16 @@
 				usedBy{"highest.apex/lib/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition:  "firstparty",
@@ -106,7 +115,16 @@
 				usedBy{"highest.apex/lib/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -124,7 +142,16 @@
 				usedBy{"highest.apex/lib/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition:  "firstparty",
@@ -154,7 +181,16 @@
 				usedBy{"highest.apex/lib/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -170,7 +206,16 @@
 				usedBy{"container.zip/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/container.zip.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -182,7 +227,13 @@
 				usedBy{"application"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/application.meta_lic",
+				"testdata/firstparty/bin/bin3.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -194,7 +245,12 @@
 				usedBy{"bin/bin1"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -206,7 +262,10 @@
 				usedBy{"lib/libd.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "notice",
@@ -231,6 +290,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/highest.apex.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -256,6 +322,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/container.zip.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -275,6 +348,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/application.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -296,6 +373,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -308,7 +388,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "reciprocal",
@@ -333,6 +416,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/highest.apex.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -358,6 +448,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/container.zip.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -377,6 +474,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/application.meta_lic",
+				"testdata/reciprocal/bin/bin3.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -398,6 +499,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -410,7 +514,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "restricted",
@@ -440,6 +547,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/highest.apex.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -470,6 +584,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/container.zip.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -489,6 +610,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/application.meta_lic",
+				"testdata/restricted/bin/bin3.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -513,6 +638,9 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -525,7 +653,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/restricted/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "proprietary",
@@ -555,6 +686,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/highest.apex.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -586,6 +724,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/container.zip.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -606,6 +751,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/application.meta_lic",
+				"testdata/proprietary/bin/bin3.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -627,6 +776,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -639,7 +791,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/proprietary/lib/libd.so.meta_lic",
+			},
 		},
 	}
 	for _, tt := range tests {
diff --git a/tools/compliance/cmd/listshare/listshare_test.go b/tools/compliance/cmd/listshare/listshare_test.go
index fb61583..16a8b69 100644
--- a/tools/compliance/cmd/listshare/listshare_test.go
+++ b/tools/compliance/cmd/listshare/listshare_test.go
@@ -187,7 +187,7 @@
 				},
 				{
 					project:    "device/library",
-					conditions: []string{"restricted_allows_dynamic_linking"},
+					conditions: []string{"restricted_if_statically_linked"},
 				},
 				{
 					project:    "dynamic/binary",
@@ -196,14 +196,14 @@
 				{
 					project: "static/binary",
 					conditions: []string{
-						"restricted_allows_dynamic_linking",
+						"restricted_if_statically_linked",
 					},
 				},
 				{
 					project: "static/library",
 					conditions: []string{
 						"reciprocal",
-						"restricted_allows_dynamic_linking",
+						"restricted_if_statically_linked",
 					},
 				},
 			},
@@ -219,7 +219,7 @@
 				},
 				{
 					project:    "device/library",
-					conditions: []string{"restricted_allows_dynamic_linking"},
+					conditions: []string{"restricted_if_statically_linked"},
 				},
 				{
 					project:    "dynamic/binary",
@@ -228,14 +228,14 @@
 				{
 					project: "static/binary",
 					conditions: []string{
-						"restricted_allows_dynamic_linking",
+						"restricted_if_statically_linked",
 					},
 				},
 				{
 					project: "static/library",
 					conditions: []string{
 						"reciprocal",
-						"restricted_allows_dynamic_linking",
+						"restricted_if_statically_linked",
 					},
 				},
 			},
@@ -249,14 +249,14 @@
 					project: "device/library",
 					conditions: []string{
 						"restricted",
-						"restricted_allows_dynamic_linking",
+						"restricted_if_statically_linked",
 					},
 				},
 				{
 					project: "distributable/application",
 					conditions: []string{
 						"restricted",
-						"restricted_allows_dynamic_linking",
+						"restricted_if_statically_linked",
 					},
 				},
 			},
@@ -269,20 +269,20 @@
 				{
 					project: "device/library",
 					conditions: []string{
-						"restricted_allows_dynamic_linking",
+						"restricted_if_statically_linked",
 					},
 				},
 				{
 					project: "static/binary",
 					conditions: []string{
-						"restricted_allows_dynamic_linking",
+						"restricted_if_statically_linked",
 					},
 				},
 				{
 					project: "static/library",
 					conditions: []string{
 						"reciprocal",
-						"restricted_allows_dynamic_linking",
+						"restricted_if_statically_linked",
 					},
 				},
 			},
diff --git a/tools/compliance/cmd/rtrace/rtrace_test.go b/tools/compliance/cmd/rtrace/rtrace_test.go
index cbe9461..d650868 100644
--- a/tools/compliance/cmd/rtrace/rtrace_test.go
+++ b/tools/compliance/cmd/rtrace/rtrace_test.go
@@ -44,9 +44,9 @@
 		expectedOut []string
 	}{
 		{
-			condition: "firstparty",
-			name:      "apex",
-			roots:     []string{"highest.apex.meta_lic"},
+			condition:   "firstparty",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
@@ -60,33 +60,33 @@
 			expectedOut: []string{},
 		},
 		{
-			condition: "firstparty",
-			name:      "container",
-			roots:     []string{"container.zip.meta_lic"},
+			condition:   "firstparty",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "firstparty",
-			name:      "application",
-			roots:     []string{"application.meta_lic"},
+			condition:   "firstparty",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "firstparty",
-			name:      "binary",
-			roots:     []string{"bin/bin1.meta_lic"},
+			condition:   "firstparty",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "firstparty",
-			name:      "library",
-			roots:     []string{"lib/libd.so.meta_lic"},
+			condition:   "firstparty",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "notice",
-			name:      "apex",
-			roots:     []string{"highest.apex.meta_lic"},
+			condition:   "notice",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
@@ -100,33 +100,33 @@
 			expectedOut: []string{},
 		},
 		{
-			condition: "notice",
-			name:      "container",
-			roots:     []string{"container.zip.meta_lic"},
+			condition:   "notice",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "notice",
-			name:      "application",
-			roots:     []string{"application.meta_lic"},
+			condition:   "notice",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "notice",
-			name:      "binary",
-			roots:     []string{"bin/bin1.meta_lic"},
+			condition:   "notice",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "notice",
-			name:      "library",
-			roots:     []string{"lib/libd.so.meta_lic"},
+			condition:   "notice",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "reciprocal",
-			name:      "apex",
-			roots:     []string{"highest.apex.meta_lic"},
+			condition:   "reciprocal",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
@@ -140,27 +140,27 @@
 			expectedOut: []string{},
 		},
 		{
-			condition: "reciprocal",
-			name:      "container",
-			roots:     []string{"container.zip.meta_lic"},
+			condition:   "reciprocal",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "reciprocal",
-			name:      "application",
-			roots:     []string{"application.meta_lic"},
+			condition:   "reciprocal",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "reciprocal",
-			name:      "binary",
-			roots:     []string{"bin/bin1.meta_lic"},
+			condition:   "reciprocal",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "reciprocal",
-			name:      "library",
-			roots:     []string{"lib/libd.so.meta_lic"},
+			condition:   "reciprocal",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
@@ -168,7 +168,7 @@
 			name:      "apex",
 			roots:     []string{"highest.apex.meta_lic"},
 			expectedOut: []string{
-				"testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"testdata/restricted/lib/liba.so.meta_lic restricted_if_statically_linked",
 				"testdata/restricted/lib/libb.so.meta_lic restricted",
 			},
 		},
@@ -180,7 +180,7 @@
 				sources:     []string{"testdata/restricted/bin/bin1.meta_lic"},
 				stripPrefix: []string{"testdata/restricted/"},
 			},
-			expectedOut: []string{"lib/liba.so.meta_lic restricted_allows_dynamic_linking"},
+			expectedOut: []string{"lib/liba.so.meta_lic restricted_if_statically_linked"},
 		},
 		{
 			condition: "restricted",
@@ -197,32 +197,32 @@
 			name:      "container",
 			roots:     []string{"container.zip.meta_lic"},
 			expectedOut: []string{
-				"testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking",
+				"testdata/restricted/lib/liba.so.meta_lic restricted_if_statically_linked",
 				"testdata/restricted/lib/libb.so.meta_lic restricted",
 			},
 		},
 		{
-			condition: "restricted",
-			name:      "application",
-			roots:     []string{"application.meta_lic"},
-			expectedOut: []string{"testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking"},
+			condition:   "restricted",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
+			expectedOut: []string{"testdata/restricted/lib/liba.so.meta_lic restricted_if_statically_linked"},
 		},
 		{
-			condition: "restricted",
-			name:      "binary",
-			roots:     []string{"bin/bin1.meta_lic"},
-			expectedOut: []string{"testdata/restricted/lib/liba.so.meta_lic restricted_allows_dynamic_linking"},
+			condition:   "restricted",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
+			expectedOut: []string{"testdata/restricted/lib/liba.so.meta_lic restricted_if_statically_linked"},
 		},
 		{
-			condition: "restricted",
-			name:      "library",
-			roots:     []string{"lib/libd.so.meta_lic"},
+			condition:   "restricted",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "proprietary",
-			name:      "apex",
-			roots:     []string{"highest.apex.meta_lic"},
+			condition:   "proprietary",
+			name:        "apex",
+			roots:       []string{"highest.apex.meta_lic"},
 			expectedOut: []string{"testdata/proprietary/lib/libb.so.meta_lic restricted"},
 		},
 		{
@@ -246,27 +246,27 @@
 			expectedOut: []string{"lib/libb.so.meta_lic restricted"},
 		},
 		{
-			condition: "proprietary",
-			name:      "container",
-			roots:     []string{"container.zip.meta_lic"},
+			condition:   "proprietary",
+			name:        "container",
+			roots:       []string{"container.zip.meta_lic"},
 			expectedOut: []string{"testdata/proprietary/lib/libb.so.meta_lic restricted"},
 		},
 		{
-			condition: "proprietary",
-			name:      "application",
-			roots:     []string{"application.meta_lic"},
+			condition:   "proprietary",
+			name:        "application",
+			roots:       []string{"application.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "proprietary",
-			name:      "binary",
-			roots:     []string{"bin/bin1.meta_lic"},
+			condition:   "proprietary",
+			name:        "binary",
+			roots:       []string{"bin/bin1.meta_lic"},
 			expectedOut: []string{},
 		},
 		{
-			condition: "proprietary",
-			name:      "library",
-			roots:     []string{"lib/libd.so.meta_lic"},
+			condition:   "proprietary",
+			name:        "library",
+			roots:       []string{"lib/libd.so.meta_lic"},
 			expectedOut: []string{},
 		},
 	}
diff --git a/tools/compliance/cmd/sbom/sbom.go b/tools/compliance/cmd/sbom/sbom.go
new file mode 100644
index 0000000..c378e39
--- /dev/null
+++ b/tools/compliance/cmd/sbom/sbom.go
@@ -0,0 +1,538 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"crypto/sha1"
+	"encoding/hex"
+	"flag"
+	"fmt"
+	"io"
+	"io/fs"
+	"os"
+	"path/filepath"
+	"sort"
+	"strings"
+	"time"
+
+	"android/soong/response"
+	"android/soong/tools/compliance"
+	"android/soong/tools/compliance/projectmetadata"
+
+	"github.com/google/blueprint/deptools"
+
+	"github.com/spdx/tools-golang/builder/builder2v2"
+	"github.com/spdx/tools-golang/json"
+	"github.com/spdx/tools-golang/spdx/common"
+	spdx "github.com/spdx/tools-golang/spdx/v2_2"
+	"github.com/spdx/tools-golang/spdxlib"
+)
+
+var (
+	failNoneRequested = fmt.Errorf("\nNo license metadata files requested")
+	failNoLicenses    = fmt.Errorf("No licenses found")
+)
+
+const NOASSERTION = "NOASSERTION"
+
+type context struct {
+	stdout       io.Writer
+	stderr       io.Writer
+	rootFS       fs.FS
+	product      string
+	stripPrefix  []string
+	creationTime creationTimeGetter
+}
+
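+// strip removes the first matching stripPrefix from installPath, substituting the
+// product name when stripping would leave an empty path; paths matching no prefix
+// are returned unchanged. Illustrative example (hypothetical values): with
+// stripPrefix = ["out/soong/"] and product = "aosp", strip("out/soong/a/b.so")
+// returns "a/b.so" and strip("out/soong/") returns "aosp".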
+func (ctx context) strip(installPath string) string {
+	for _, prefix := range ctx.stripPrefix {
+		if strings.HasPrefix(installPath, prefix) {
+			p := strings.TrimPrefix(installPath, prefix)
+			if len(p) == 0 {
+				p = ctx.product
+			}
+			if len(p) == 0 {
+				continue
+			}
+			return p
+		}
+	}
+	return installPath
+}
+
+// newMultiString creates a flag that allows multiple values in an array.
+func newMultiString(flags *flag.FlagSet, name, usage string) *multiString {
+	var f multiString
+	flags.Var(&f, name, usage)
+	return &f
+}
+
+// multiString implements the flag `Value` interface for multiple strings.
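+// Each occurrence of the flag appends one value; e.g. passing
+// "-strip_prefix out/a/ -strip_prefix out/b/" collects ["out/a/", "out/b/"].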
+type multiString []string
+
+func (ms *multiString) String() string     { return strings.Join(*ms, ", ") }
+func (ms *multiString) Set(s string) error { *ms = append(*ms, s); return nil }
+
+func main() {
+	var expandedArgs []string
+	for _, arg := range os.Args[1:] {
+		if strings.HasPrefix(arg, "@") {
+			f, err := os.Open(strings.TrimPrefix(arg, "@"))
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+
+			respArgs, err := response.ReadRspFile(f)
+			f.Close()
+			if err != nil {
+				fmt.Fprintln(os.Stderr, err.Error())
+				os.Exit(1)
+			}
+			expandedArgs = append(expandedArgs, respArgs...)
+		} else {
+			expandedArgs = append(expandedArgs, arg)
+		}
+	}
+
+	flags := flag.NewFlagSet("flags", flag.ExitOnError)
+
+	flags.Usage = func() {
+		fmt.Fprintf(os.Stderr, `Usage: %s {options} file.meta_lic {file.meta_lic...}
+
+Outputs an SBOM in SPDX format.
+
+Options:
+`, filepath.Base(os.Args[0]))
+		flags.PrintDefaults()
+	}
+
+	outputFile := flags.String("o", "-", "Where to write the SBOM spdx file. (default stdout)")
+	depsFile := flags.String("d", "", "Where to write the deps file")
+	product := flags.String("product", "", "The name of the product for which the notice is generated.")
+	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths, e.g. the path to the root (multiple allowed)")
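+	// Illustrative invocation (paths taken from the test data in this change):
+	//   sbom -o sbom.spdx.json -d sbom.d -strip_prefix testdata/firstparty/ \
+	//     testdata/firstparty/highest.apex.meta_lic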
+
+	flags.Parse(expandedArgs)
+
+	// Must specify at least one root target.
+	if flags.NArg() == 0 {
+		flags.Usage()
+		os.Exit(2)
+	}
+
+	if len(*outputFile) == 0 {
+		flags.Usage()
+		fmt.Fprintf(os.Stderr, "must specify file for -o; use - for stdout\n")
+		os.Exit(2)
+	} else {
+		dir, err := filepath.Abs(filepath.Dir(*outputFile))
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot determine path to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+		fi, err := os.Stat(dir)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "cannot read directory %q of %q: %s\n", dir, *outputFile, err)
+			os.Exit(1)
+		}
+		if !fi.IsDir() {
+			fmt.Fprintf(os.Stderr, "parent %q of %q is not a directory\n", dir, *outputFile)
+			os.Exit(1)
+		}
+	}
+
+	var ofile io.Writer
+	ofile = os.Stdout
+	var obuf *bytes.Buffer
+	if *outputFile != "-" {
+		obuf = &bytes.Buffer{}
+		ofile = obuf
+	}
+
+	ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, actualTime}
+
+	spdxDoc, deps, err := sbomGenerator(ctx, flags.Args()...)
+
+	if err != nil {
+		if err == failNoneRequested {
+			flags.Usage()
+		}
+		fmt.Fprintf(os.Stderr, "%s\n", err.Error())
+		os.Exit(1)
+	}
+
+	// Write the generated SPDX document.
+	if err := spdx_json.Save2_2(spdxDoc, ofile); err != nil {
+		fmt.Fprintf(os.Stderr, "failed to write document to %v: %v", *outputFile, err)
+		os.Exit(1)
+	}
+
+	if *outputFile != "-" {
+		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write output to %q: %s\n", *outputFile, err)
+			os.Exit(1)
+		}
+	}
+
+	if *depsFile != "" {
+		err := deptools.WriteDepFile(*depsFile, *outputFile, deps)
+		if err != nil {
+			fmt.Fprintf(os.Stderr, "could not write deps to %q: %s\n", *depsFile, err)
+			os.Exit(1)
+		}
+	}
+	os.Exit(0)
+}
+
+type creationTimeGetter func() string
+
+// actualTime returns the current time in UTC, formatted as YYYY-MM-DDTHH:MM:SSZ.
+func actualTime() string {
+	t := time.Now().UTC()
+	return t.Format("2006-01-02T15:04:05Z")
+}
+
+// replaceSlashes replaces "/" with "-" so a path can be used as a package or file SPDXID.
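+// For example, "lib/liba.so.meta_lic" becomes "lib-liba.so.meta_lic".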
+func replaceSlashes(x string) string {
+	return strings.ReplaceAll(x, "/", "-")
+}
+
+// stripDocName removes the outdir prefix and the meta_lic suffix from a target name.
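+// For example, "testdata-firstparty-highest.apex.meta_lic" becomes
+// "testdata-firstparty-highest.apex".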
+func stripDocName(name string) string {
+	// remove outdir prefix
+	if strings.HasPrefix(name, "out/") {
+		name = name[4:]
+	}
+
+	// remove suffix
+	if strings.HasSuffix(name, ".meta_lic") {
+		name = name[:len(name)-9]
+	} else if strings.HasSuffix(name, "/meta_lic") {
+		name = name[:len(name)-9] + "/"
+	}
+
+	return name
+}
+
+// getPackageName returns the package name for a target node.
+func getPackageName(_ *context, tn *compliance.TargetNode) string {
+	return replaceSlashes(tn.Name())
+}
+
+// getDocumentName returns the SPDX document name for a target node.
+func getDocumentName(ctx *context, tn *compliance.TargetNode, pm *projectmetadata.ProjectMetadata) string {
+	if len(ctx.product) > 0 {
+		return replaceSlashes(ctx.product)
+	}
+	if len(tn.ModuleName()) > 0 {
+		if pm != nil {
+			return replaceSlashes(pm.Name() + ":" + tn.ModuleName())
+		}
+		return replaceSlashes(tn.ModuleName())
+	}
+
+	return stripDocName(replaceSlashes(tn.Name()))
+}
+
+// getDownloadUrl returns the download URL if one is available (Git, SVN, etc.),
+// or NOASSERTION if no URL is available, none could be determined, or the result is ambiguous.
+func getDownloadUrl(_ *context, pm *projectmetadata.ProjectMetadata) string {
+	if pm == nil {
+		return NOASSERTION
+	}
+
+	urlsByTypeName := pm.UrlsByTypeName()
+	if urlsByTypeName == nil {
+		return NOASSERTION
+	}
+
+	url := urlsByTypeName.DownloadUrl()
+	if url == "" {
+		return NOASSERTION
+	}
+	return url
+}
+
+// getProjectMetadata returns the best available project metadata for the target node, or nil if none exists.
+func getProjectMetadata(_ *context, pmix *projectmetadata.Index,
+	tn *compliance.TargetNode) (*projectmetadata.ProjectMetadata, error) {
+	pms, err := pmix.MetadataForProjects(tn.Projects()...)
+	if err != nil {
+		return nil, fmt.Errorf("Unable to read projects for %q: %w\n", tn, err)
+	}
+	if len(pms) == 0 {
+		return nil, nil
+	}
+
+	// Pick the project metadata with the most information available (name, version,
+	// download URL); ties are broken by the lexicographically smallest project path.
+	score := -1
+	index := -1
+	for i := 0; i < len(pms); i++ {
+		tempScore := 0
+		if pms[i].Name() != "" {
+			tempScore += 1
+		}
+		if pms[i].Version() != "" {
+			tempScore += 1
+		}
+		if pms[i].UrlsByTypeName().DownloadUrl() != "" {
+			tempScore += 1
+		}
+
+		if tempScore == score {
+			if pms[i].Project() < pms[index].Project() {
+				index = i
+			}
+		} else if tempScore > score {
+			score = tempScore
+			index = i
+		}
+	}
+	return pms[index], nil
+}
+
+// inputFiles returns the complete list of files read
+func inputFiles(lg *compliance.LicenseGraph, pmix *projectmetadata.Index, licenseTexts []string) []string {
+	projectMeta := pmix.AllMetadataFiles()
+	targets := lg.TargetNames()
+	files := make([]string, 0, len(licenseTexts)+len(targets)+len(projectMeta))
+	files = append(files, licenseTexts...)
+	files = append(files, targets...)
+	files = append(files, projectMeta...)
+	return files
+}
+
+// generateSPDXNamespace generates an SPDX document namespace from the
+// CreationInfo.Created timestamp and a SHA1 checksum of that timestamp.
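+// The result has the form "SPDXRef-DOCUMENT-<created>-<sha1 hex>"; for example,
+// generateSPDXNamespace("1970-01-01T00:00:00Z") yields a namespace beginning with
+// "SPDXRef-DOCUMENT-1970-01-01T00:00:00Z-".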
+func generateSPDXNamespace(created string) string {
+	// Compute a SHA1 checksum of the CreationInfo.Created field.
+	hash := sha1.Sum([]byte(created))
+	checksum := hex.EncodeToString(hash[:])
+
+	// Combine the checksum and timestamp to generate the SPDX Namespace.
+	namespace := fmt.Sprintf("SPDXRef-DOCUMENT-%s-%s", created, checksum)
+
+	return namespace
+}
+
+// sbomGenerator implements the SPDX SBOM utility.
+//
+// SBOMs are part of recent US government regulation intended to improve national
+// cybersecurity and enhance software supply chain transparency; see https://www.cisa.gov/sbom.
+//
+// sbomGenerator follows the SPDX standard (https://spdx.github.io/spdx-spec/) and the
+// internal Google SBOM style guide (http://goto.google.com/spdx-style-guide).
+func sbomGenerator(ctx *context, files ...string) (*spdx.Document, []string, error) {
+	// Must be at least one root file.
+	if len(files) < 1 {
+		return nil, nil, failNoneRequested
+	}
+
+	pmix := projectmetadata.NewIndex(ctx.rootFS)
+
+	lg, err := compliance.ReadLicenseGraph(ctx.rootFS, ctx.stderr, files)
+
+	if err != nil {
+		return nil, nil, fmt.Errorf("Unable to read license metadata file(s) for %q: %v\n", files, err)
+	}
+
+	// creating the packages section
+	pkgs := []*spdx.Package{}
+
+	// creating the relationship section
+	relationships := []*spdx.Relationship{}
+
+	// creating the license section
+	otherLicenses := []*spdx.OtherLicense{}
+
+	// spdx document name
+	var docName string
+
+	// main package name
+	var mainPkgName string
+
+	// Build the license references used for each package's concluded license.
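+	// For example (hypothetical license paths), license texts ["a/LICENSE:x", "b/LICENSE:x"]
+	// yield the concluded license "(LicenseRef-a-LICENSE AND LicenseRef-b-LICENSE)".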
+	licenses := make(map[string]string)
+	concludedLicenses := func(licenseTexts []string) string {
+		licenseRefs := make([]string, 0, len(licenseTexts))
+		for _, licenseText := range licenseTexts {
+			license := strings.SplitN(licenseText, ":", 2)[0]
+			if _, ok := licenses[license]; !ok {
+				licenseRef := "LicenseRef-" + replaceSlashes(license)
+				licenses[license] = licenseRef
+			}
+
+			licenseRefs = append(licenseRefs, licenses[license])
+		}
+		if len(licenseRefs) > 1 {
+			return "(" + strings.Join(licenseRefs, " AND ") + ")"
+		} else if len(licenseRefs) == 1 {
+			return licenseRefs[0]
+		}
+		return "NONE"
+	}
+
+	isMainPackage := true
+	visitedNodes := make(map[*compliance.TargetNode]struct{})
+
+	// Perform a breadth-first, top-down walk of the license graph, building package information.
+	compliance.WalkTopDownBreadthFirst(nil, lg,
+		func(lg *compliance.LicenseGraph, tn *compliance.TargetNode, path compliance.TargetEdgePath) bool {
+			if err != nil {
+				return false
+			}
+			var pm *projectmetadata.ProjectMetadata
+			pm, err = getProjectMetadata(ctx, pmix, tn)
+			if err != nil {
+				return false
+			}
+
+			if isMainPackage {
+				docName = getDocumentName(ctx, tn, pm)
+				mainPkgName = replaceSlashes(getPackageName(ctx, tn))
+				isMainPackage = false
+			}
+
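+			// Record the SPDX relationships: the document DESCRIBES the root package, and
+			// each dependency edge maps by annotation to RUNTIME_DEPENDENCY_OF (dynamic
+			// link), CONTAINS (derivation), or BUILD_TOOL_OF (build tool).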
+			if len(path) == 0 {
+				// Add the describe relationship for the main package
+				rln := &spdx.Relationship{
+					RefA:         common.MakeDocElementID("" /* this document */, "DOCUMENT"),
+					RefB:         common.MakeDocElementID("", mainPkgName),
+					Relationship: "DESCRIBES",
+				}
+				relationships = append(relationships, rln)
+
+			} else {
+				// Check parent and identify annotation
+				parent := path[len(path)-1]
+				targetEdge := parent.Edge()
+				if targetEdge.IsRuntimeDependency() {
+					// Adding the dynamic link annotation as a RUNTIME_DEPENDENCY_OF relationship
+					rln := &spdx.Relationship{
+						RefA:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, tn))),
+						RefB:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, targetEdge.Target()))),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					}
+					relationships = append(relationships, rln)
+
+				} else if targetEdge.IsDerivation() {
+					// Adding the derivation annotation as a CONTAINS relationship
+					rln := &spdx.Relationship{
+						RefA:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, targetEdge.Target()))),
+						RefB:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, tn))),
+						Relationship: "CONTAINS",
+					}
+					relationships = append(relationships, rln)
+
+				} else if targetEdge.IsBuildTool() {
+					// Adding the toolchain annotation as a BUILD_TOOL_OF relationship
+					rln := &spdx.Relationship{
+						RefA:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, tn))),
+						RefB:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, targetEdge.Target()))),
+						Relationship: "BUILD_TOOL_OF",
+					}
+					relationships = append(relationships, rln)
+
+				} else {
+					panic(fmt.Errorf("Unknown dependency type: %v", targetEdge.Annotations()))
+				}
+			}
+
+			if _, alreadyVisited := visitedNodes[tn]; alreadyVisited {
+				return false
+			}
+			visitedNodes[tn] = struct{}{}
+			pkgName := getPackageName(ctx, tn)
+
+			// Making an spdx package and adding it to pkgs
+			pkg := &spdx.Package{
+				PackageName:             replaceSlashes(pkgName),
+				PackageDownloadLocation: getDownloadUrl(ctx, pm),
+				PackageSPDXIdentifier:   common.ElementID(replaceSlashes(pkgName)),
+				PackageLicenseConcluded: concludedLicenses(tn.LicenseTexts()),
+			}
+
+			if pm != nil && pm.Version() != "" {
+				pkg.PackageVersion = pm.Version()
+			} else {
+				pkg.PackageVersion = NOASSERTION
+			}
+
+			pkgs = append(pkgs, pkg)
+
+			return true
+		})
+
+	// Adding Non-standard licenses
+
+	licenseTexts := make([]string, 0, len(licenses))
+
+	for licenseText := range licenses {
+		licenseTexts = append(licenseTexts, licenseText)
+	}
+
+	sort.Strings(licenseTexts)
+
+	for _, licenseText := range licenseTexts {
+		// open the file
+		f, err := ctx.rootFS.Open(filepath.Clean(licenseText))
+		if err != nil {
+			return nil, nil, fmt.Errorf("error opening license text file %q: %w", licenseText, err)
+		}
+
+		// read the file
+		text, err := io.ReadAll(f)
+		if err != nil {
+			return nil, nil, fmt.Errorf("error reading license text file %q: %w", licenseText, err)
+		}
+		// Making an spdx License and adding it to otherLicenses
+		otherLicenses = append(otherLicenses, &spdx.OtherLicense{
+			LicenseName:       strings.Replace(licenses[licenseText], "LicenseRef-", "", -1),
+			LicenseIdentifier: string(licenses[licenseText]),
+			ExtractedText:     string(text),
+		})
+	}
+
+	deps := inputFiles(lg, pmix, licenseTexts)
+	sort.Strings(deps)
+
+	// Making the SPDX doc
+	ci, err := builder2v2.BuildCreationInfoSection2_2("Organization", "Google LLC", nil)
+	if err != nil {
+		return nil, nil, fmt.Errorf("Unable to build creation info section for SPDX doc: %v\n", err)
+	}
+
+	ci.Created = ctx.creationTime()
+
+	doc := &spdx.Document{
+		SPDXVersion:       "SPDX-2.2",
+		DataLicense:       "CC0-1.0",
+		SPDXIdentifier:    "DOCUMENT",
+		DocumentName:      docName,
+		DocumentNamespace: generateSPDXNamespace(ci.Created),
+		CreationInfo:      ci,
+		Packages:          pkgs,
+		Relationships:     relationships,
+		OtherLicenses:     otherLicenses,
+	}
+
+	if err := spdxlib.ValidateDocument2_2(doc); err != nil {
+		return nil, nil, fmt.Errorf("Unable to validate the SPDX doc: %v\n", err)
+	}
+
+	return doc, deps, nil
+}
diff --git a/tools/compliance/cmd/sbom/sbom_test.go b/tools/compliance/cmd/sbom/sbom_test.go
new file mode 100644
index 0000000..6472f51
--- /dev/null
+++ b/tools/compliance/cmd/sbom/sbom_test.go
@@ -0,0 +1,2467 @@
+// Copyright 2022 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"bytes"
+	"encoding/json"
+	"fmt"
+	"os"
+	"reflect"
+	"strings"
+	"testing"
+	"time"
+
+	"android/soong/tools/compliance"
+	"github.com/spdx/tools-golang/builder/builder2v2"
+	"github.com/spdx/tools-golang/spdx/common"
+	spdx "github.com/spdx/tools-golang/spdx/v2_2"
+)
+
+func TestMain(m *testing.M) {
+	// Change into the parent directory before running the tests
+	// so they can find the testdata directory.
+	if err := os.Chdir(".."); err != nil {
+		fmt.Printf("failed to change to parent directory: %s\n", err)
+		os.Exit(1)
+	}
+	os.Exit(m.Run())
+}
+
+func Test(t *testing.T) {
+	tests := []struct {
+		condition    string
+		name         string
+		outDir       string
+		roots        []string
+		stripPrefix  string
+		expectedOut  *spdx.Document
+		expectedDeps []string
+	}{
+		{
+			condition: "firstparty",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-firstparty-highest.apex",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-firstparty-highest.apex.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-highest.apex.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-highest.apex.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			condition: "firstparty",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-firstparty-application",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-firstparty-application.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-application.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-bin-bin3.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin3.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-application.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin3.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-application.meta_lic"),
+						Relationship: "BUILD_TOOL_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-application.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-application.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/application.meta_lic",
+				"testdata/firstparty/bin/bin3.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+			},
+		},
+		{
+			condition: "firstparty",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-firstparty-container.zip",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-firstparty-container.zip.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-container.zip.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-container.zip.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/container.zip.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			condition: "firstparty",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-firstparty-bin-bin1",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-firstparty-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+			},
+		},
+		{
+			condition: "firstparty",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-firstparty-lib-libd.so",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-firstparty-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libd.so.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-notice-highest.apex",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-notice-highest.apex.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-highest.apex.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-highest.apex.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/highest.apex.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-notice-container.zip",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-notice-container.zip.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-container.zip.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-container.zip.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/container.zip.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-notice-application",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-notice-application.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-application.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-bin-bin3.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin3.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-application.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin3.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-application.meta_lic"),
+						Relationship: "BUILD_TOOL_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-application.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-application.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/application.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-notice-bin-bin1",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-notice-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+			},
+		},
+		{
+			condition: "notice",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-notice-lib-libd.so",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-notice-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libd.so.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-reciprocal-highest.apex",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-reciprocal-highest.apex.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-highest.apex.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-highest.apex.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/highest.apex.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-reciprocal-application",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-reciprocal-application.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-application.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-bin-bin3.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-bin-bin3.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-application.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin3.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-application.meta_lic"),
+						Relationship: "BUILD_TOOL_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-application.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-application.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/application.meta_lic",
+				"testdata/reciprocal/bin/bin3.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-reciprocal-bin-bin1",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-reciprocal-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+			},
+		},
+		{
+			condition: "reciprocal",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-reciprocal-lib-libd.so",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-reciprocal-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-libd.so.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-restricted-highest.apex",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-restricted-highest.apex.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-highest.apex.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-highest.apex.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/highest.apex.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-restricted-container.zip",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-restricted-container.zip.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-container.zip.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-container.zip.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/container.zip.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-restricted-bin-bin1",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-restricted-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+			},
+		},
+		{
+			condition: "restricted",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-restricted-lib-libd.so",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-restricted-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libd.so.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/restricted/lib/libd.so.meta_lic",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-proprietary-highest.apex",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-proprietary-highest.apex.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-highest.apex.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-highest.apex.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+						ExtractedText:     "@@@Proprietary License@@@\n",
+						LicenseName:       "testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/highest.apex.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-proprietary-container.zip",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-proprietary-container.zip.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-container.zip.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-container.zip.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+						ExtractedText:     "@@@Proprietary License@@@\n",
+						LicenseName:       "testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/container.zip.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "application",
+			roots:     []string{"application.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-proprietary-application",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-proprietary-application.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-application.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-bin-bin3.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin3.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-application.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin3.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-application.meta_lic"),
+						Relationship: "BUILD_TOOL_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-application.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-application.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+						ExtractedText:     "@@@Proprietary License@@@\n",
+						LicenseName:       "testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/application.meta_lic",
+				"testdata/proprietary/bin/bin3.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/restricted/RESTRICTED_LICENSE",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "binary",
+			roots:     []string{"bin/bin1.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-proprietary-bin-bin1",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-proprietary-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+						ExtractedText:     "@@@Proprietary License@@@\n",
+						LicenseName:       "testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+			},
+		},
+		{
+			condition: "proprietary",
+			name:      "library",
+			roots:     []string{"lib/libd.so.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-proprietary-lib-libd.so",
+				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-proprietary-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libd.so.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
+			},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/proprietary/lib/libd.so.meta_lic",
+			},
+		},
+	}
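+	// Generate an SBOM for each test case and compare the resulting SPDX
+	// document and dependency list against the expected values above.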
+	for _, tt := range tests {
+		t.Run(tt.condition+" "+tt.name, func(t *testing.T) {
+			stdout := &bytes.Buffer{}
+			stderr := &bytes.Buffer{}
+
+			rootFiles := make([]string, 0, len(tt.roots))
+			for _, r := range tt.roots {
+				rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
+			}
+
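+			// fakeTime provides a fixed creation time so the generated document namespace is deterministic.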
+			ctx := context{stdout, stderr, compliance.GetFS(tt.outDir), "", []string{tt.stripPrefix}, fakeTime}
+
+			spdxDoc, deps, err := sbomGenerator(&ctx, rootFiles...)
+			if err != nil {
+				t.Fatalf("sbom: error = %v, stderr = %v", err, stderr)
+			}
+			if stderr.Len() > 0 {
+				t.Errorf("sbom: gotStderr = %v, want none", stderr)
+			}
+
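+			// Reject structurally invalid documents before the field-by-field comparison.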
+			if err := validate(spdxDoc); err != nil {
+				t.Fatalf("sbom: document fails to validate: %v", err)
+			}
+
+			gotData, err := json.Marshal(spdxDoc)
+			if err != nil {
+				t.Fatalf("sbom: failed to marshal spdx doc: %v", err)
+			}
+
+			t.Logf("Got SPDX Doc: %s", string(gotData))
+
+			expectedData, err := json.Marshal(tt.expectedOut)
+			if err != nil {
+				t.Fatalf("sbom: failed to marshal spdx doc: %v", err)
+			}
+
+			t.Logf("Want SPDX Doc: %s", string(expectedData))
+
+			// compare the spdx Docs
+			compareSpdxDocs(t, spdxDoc, tt.expectedOut)
+
+			// compare deps
+			t.Logf("got deps: %q", deps)
+
+			t.Logf("want deps: %q", tt.expectedDeps)
+
+			if g, w := deps, tt.expectedDeps; !reflect.DeepEqual(g, w) {
+				t.Errorf("unexpected deps, wanted:\n%s\ngot:\n%s\n",
+					strings.Join(w, "\n"), strings.Join(g, "\n"))
+			}
+		})
+	}
+}
+
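+// getCreationInfo returns the SPDX creation info section expected in the generated documents,
+// reporting a test error if it cannot be built.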
+func getCreationInfo(t *testing.T) *spdx.CreationInfo {
+	ci, err := builder2v2.BuildCreationInfoSection2_2("Organization", "Google LLC", nil)
+	if err != nil {
+		t.Errorf("Unable to get creation info: %v", err)
+		return nil
+	}
+	return ci
+}
+
+// validate returns an error if the Document is found to be invalid
+func validate(doc *spdx.Document) error {
+	if doc.SPDXVersion == "" {
+		return fmt.Errorf("SPDXVersion: got nothing, want spdx version")
+	}
+	if doc.DataLicense == "" {
+		return fmt.Errorf("DataLicense: got nothing, want Data License")
+	}
+	if doc.SPDXIdentifier == "" {
+		return fmt.Errorf("SPDXIdentifier: got nothing, want SPDX Identifier")
+	}
+	if doc.DocumentName == "" {
+		return fmt.Errorf("DocumentName: got nothing, want Document Name")
+	}
+	if fmt.Sprintf("%v", doc.CreationInfo.Creators[1].Creator) != "Google LLC" {
+		return fmt.Errorf("Creator: got %v, want 'Google LLC'", doc.CreationInfo.Creators[1].Creator)
+	}
+	_, err := time.Parse(time.RFC3339, doc.CreationInfo.Created)
+	if err != nil {
+		return fmt.Errorf("Invalid time spec: %q: got error %q, want no error", doc.CreationInfo.Created, err)
+	}
+
+	for _, license := range doc.OtherLicenses {
+		if license.ExtractedText == "" {
+			return fmt.Errorf("License file: %q: got nothing, want license text", license.LicenseName)
+		}
+	}
+	return nil
+}
+
+// compareSpdxDocs deep-compares two spdx docs by going through the info section, packages, relationships and licenses
+func compareSpdxDocs(t *testing.T, actual, expected *spdx.Document) {
+
+	if actual == nil || expected == nil {
+		t.Errorf("SBOM: SPDX Doc is nil! Got %v: Expected %v", actual, expected)
+		return
+	}
+
+	if actual.DocumentName != expected.DocumentName {
+		t.Errorf("sbom: unexpected SPDX Document Name got %q, want %q", actual.DocumentName, expected.DocumentName)
+	}
+
+	if actual.SPDXVersion != expected.SPDXVersion {
+		t.Errorf("sbom: unexpected SPDX Version got %s, want %s", actual.SPDXVersion, expected.SPDXVersion)
+	}
+
+	if actual.DataLicense != expected.DataLicense {
+		t.Errorf("sbom: unexpected SPDX DataLicense got %s, want %s", actual.DataLicense, expected.DataLicense)
+	}
+
+	if actual.SPDXIdentifier != expected.SPDXIdentifier {
+		t.Errorf("sbom: unexpected SPDX Identifier got %s, want %s", actual.SPDXIdentifier, expected.SPDXIdentifier)
+	}
+
+	if actual.DocumentNamespace != expected.DocumentNamespace {
+		t.Errorf("sbom: unexpected SPDX Document Namespace got %s, want %s", actual.DocumentNamespace, expected.DocumentNamespace)
+	}
+
+	// compare creation info
+	compareSpdxCreationInfo(t, actual.CreationInfo, expected.CreationInfo)
+
+	// compare packages
+	if len(actual.Packages) != len(expected.Packages) {
+		t.Errorf("SBOM: Number of Packages is different! Got %d: Expected %d", len(actual.Packages), len(expected.Packages))
+	}
+
+	for i, pkg := range actual.Packages {
+		if !compareSpdxPackages(t, i, pkg, expected.Packages[i]) {
+			break
+		}
+	}
+
+	// compare licenses
+	if len(actual.OtherLicenses) != len(expected.OtherLicenses) {
+		t.Errorf("SBOM: Number of Licenses in actual is different! Got %d: Expected %d", len(actual.OtherLicenses), len(expected.OtherLicenses))
+	}
+	for i, license := range actual.OtherLicenses {
+		if !compareLicenses(t, i, license, expected.OtherLicenses[i]) {
+			break
+		}
+	}
+
+	// compare relationships
+	if len(actual.Relationships) != len(expected.Relationships) {
+		t.Errorf("SBOM: Number of Relationships in actual is different! Got %d: Expected %d", len(actual.Relationships), len(expected.Relationships))
+	}
+	for i, rl := range actual.Relationships {
+		if !compareRelationShips(t, i, rl, expected.Relationships[i]) {
+			break
+		}
+	}
+}
+
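+// compareSpdxCreationInfo compares the creation info sections of two SPDX docs, reporting any differences as test errors.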
+func compareSpdxCreationInfo(t *testing.T, actual, expected *spdx.CreationInfo) {
+	if actual == nil || expected == nil {
+		t.Errorf("SBOM: Creation info is nil! Got %v: Expected %v", actual, expected)
+		return
+	}
+
+	if actual.LicenseListVersion != expected.LicenseListVersion {
+		t.Errorf("SBOM: Creation info license version Error! Got %s: Expected %s", actual.LicenseListVersion, expected.LicenseListVersion)
+	}
+
+	if len(actual.Creators) != len(expected.Creators) {
+		t.Errorf("SBOM: Creation info creators Error! Got %d: Expected %d", len(actual.Creators), len(expected.Creators))
+	}
+
+	for i, info := range actual.Creators {
+		if info != expected.Creators[i] {
+			t.Errorf("SBOM: Creation info creators Error! Got %q: Expected %q", info, expected.Creators[i])
+		}
+	}
+}
+
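+// compareSpdxPackages compares an actual package against the expected package at index i, returning false on the first mismatch.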
+func compareSpdxPackages(t *testing.T, i int, actual, expected *spdx.Package) bool {
+	if actual == nil || expected == nil {
+		t.Errorf("SBOM: Packages are nil at index %d! Got %v: Expected %v", i, actual, expected)
+		return false
+	}
+	if actual.PackageName != expected.PackageName {
+		t.Errorf("SBOM: Package name Error at index %d! Got %s: Expected %s", i, actual.PackageName, expected.PackageName)
+		return false
+	}
+
+	if actual.PackageVersion != expected.PackageVersion {
+		t.Errorf("SBOM: Package version Error at index %d! Got %s: Expected %s", i, actual.PackageVersion, expected.PackageVersion)
+		return false
+	}
+
+	if actual.PackageSPDXIdentifier != expected.PackageSPDXIdentifier {
+		t.Errorf("SBOM: Package identifier Error at index %d! Got %s: Expected %s", i, actual.PackageSPDXIdentifier, expected.PackageSPDXIdentifier)
+		return false
+	}
+
+	if actual.PackageDownloadLocation != expected.PackageDownloadLocation {
+		t.Errorf("SBOM: Package download location Error at index %d! Got %s: Expected %s", i, actual.PackageDownloadLocation, expected.PackageDownloadLocation)
+		return false
+	}
+
+	if actual.PackageLicenseConcluded != expected.PackageLicenseConcluded {
+		t.Errorf("SBOM: Package license concluded Error at index %d! Got %s: Expected %s", i, actual.PackageLicenseConcluded, expected.PackageLicenseConcluded)
+		return false
+	}
+	return true
+}
+
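+// compareRelationShips compares an actual relationship against the expected relationship at index i, returning false on the first mismatch.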
+func compareRelationShips(t *testing.T, i int, actual, expected *spdx.Relationship) bool {
+	if actual == nil || expected == nil {
+		t.Errorf("SBOM: Relationship is nil at index %d! Got %v: Expected %v", i, actual, expected)
+		return false
+	}
+
+	if actual.RefA != expected.RefA {
+		t.Errorf("SBOM: Relationship RefA Error at index %d! Got %s: Expected %s", i, actual.RefA, expected.RefA)
+		return false
+	}
+
+	if actual.RefB != expected.RefB {
+		t.Errorf("SBOM: Relationship RefB Error at index %d! Got %s: Expected %s", i, actual.RefB, expected.RefB)
+		return false
+	}
+
+	if actual.Relationship != expected.Relationship {
+		t.Errorf("SBOM: Relationship type Error at index %d! Got %s: Expected %s", i, actual.Relationship, expected.Relationship)
+		return false
+	}
+	return true
+}
+
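+// compareLicenses compares an actual other-license entry against the expected entry at index i, returning false on the first mismatch.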
+func compareLicenses(t *testing.T, i int, actual, expected *spdx.OtherLicense) bool {
+	if actual == nil || expected == nil {
+		t.Errorf("SBOM: License is nil at index %d! Got %v: Expected %v", i, actual, expected)
+		return false
+	}
+
+	if actual.LicenseName != expected.LicenseName {
+		t.Errorf("SBOM: License Name Error at index %d! Got %s: Expected %s", i, actual.LicenseName, expected.LicenseName)
+		return false
+	}
+
+	if actual.LicenseIdentifier != expected.LicenseIdentifier {
+		t.Errorf("SBOM: License Identifier Error at index %d! Got %s: Expected %s", i, actual.LicenseIdentifier, expected.LicenseIdentifier)
+		return false
+	}
+
+	if actual.ExtractedText != expected.ExtractedText {
+		t.Errorf("SBOM: License Extracted Text Error at index %d! Got: %q want: %q", i, actual.ExtractedText, expected.ExtractedText)
+		return false
+	}
+	return true
+}
+
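+// fakeTime returns the Unix epoch in RFC3339 form so that generated document namespaces are deterministic in tests.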
+func fakeTime() string {
+	t := time.UnixMicro(0)
+	return t.UTC().Format("2006-01-02T15:04:05Z")
+}
diff --git a/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
index 7ef14e9..859be7f 100644
--- a/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
+++ b/tools/compliance/cmd/testdata/proprietary/bin/bin3.meta_lic
@@ -2,7 +2,7 @@
 module_classes: "EXECUTABLES"
 projects:  "standalone/binary"
 license_kinds:  "SPDX-license-identifier-LGPL-2.0"
-license_conditions:  "restricted"
+license_conditions:  "restricted_if_statically_linked"
 license_texts:  "testdata/restricted/RESTRICTED_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
diff --git a/tools/compliance/cmd/testdata/regressconcur/README.md b/tools/compliance/cmd/testdata/regressconcur/README.md
new file mode 100644
index 0000000..98ab0ef
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/README.md
@@ -0,0 +1,62 @@
+## Start long walks followed by short walks
+
+Detect a concurrency error where "already started" is treated as
+"already finished".
+
+### Testdata build graph structure:
+
+A restricted library at the end of a chain of notice libraries, statically
+linked into proprietary binaries that are packaged in a notice container. The
+source code for the restricted library only needs to be shared if shipped
+alongside the container with the binaries. (A sketch of the concurrency
+pitfall follows the graph below.)
+
+```dot
+strict digraph {
+	rankdir=LR;
+	bin1 [label="bin/bin1.meta_lic\nproprietary"];
+	bin2 [label="bin/bin2.meta_lic\nproprietary"];
+	bin3 [label="bin/bin3.meta_lic\nproprietary"];
+	bin4 [label="bin/bin4.meta_lic\nproprietary"];
+	bin5 [label="bin/bin5.meta_lic\nproprietary"];
+	bin6 [label="bin/bin6.meta_lic\nproprietary"];
+	bin7 [label="bin/bin7.meta_lic\nproprietary"];
+	bin8 [label="bin/bin8.meta_lic\nproprietary"];
+	bin9 [label="bin/bin9.meta_lic\nproprietary"];
+	container [label="container.zip.meta_lic\nnotice"];
+	lib1 [label="lib/lib1.so.meta_lic\nnotice"];
+	lib2 [label="lib/lib2.so.meta_lic\nnotice"];
+	lib3 [label="lib/lib3.so.meta_lic\nnotice"];
+	lib4 [label="lib/lib4.so.meta_lic\nnotice"];
+	lib5 [label="lib/lib5.so.meta_lic\nnotice"];
+	lib6 [label="lib/lib6.so.meta_lic\nnotice"];
+	lib7 [label="lib/lib7.so.meta_lic\nnotice"];
+	lib8 [label="lib/lib8.so.meta_lic\nnotice"];
+	lib9 [label="lib/lib9.so.meta_lic\nrestricted"];
+	container -> bin1 [label="static"];
+	container -> bin2 [label="static"];
+	container -> bin3 [label="static"];
+	container -> bin4 [label="static"];
+	container -> bin5 [label="static"];
+	container -> bin6 [label="static"];
+	container -> bin7 [label="static"];
+	container -> bin8 [label="static"];
+	container -> bin9 [label="static"];
+	bin1 -> lib1 [label="static"];
+	bin2 -> lib2 [label="static"];
+	bin3 -> lib3 [label="static"];
+	bin4 -> lib4 [label="static"];
+	bin5 -> lib5 [label="static"];
+	bin6 -> lib6 [label="static"];
+	bin7 -> lib7 [label="static"];
+	bin8 -> lib8 [label="static"];
+	bin9 -> lib9 [label="static"];
+	lib1 -> lib2 [label="static"];
+	lib2 -> lib3 [label="static"];
+	lib3 -> lib4 [label="static"];
+	lib4 -> lib5 [label="static"];
+	lib5 -> lib6 [label="static"];
+	lib6 -> lib7 [label="static"];
+	lib7 -> lib8 [label="static"];
+	lib8 -> lib9 [label="static"];
+	{rank=same; container}
+}
+```
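+
+### Illustrative sketch of the concurrency pitfall
+
+A minimal, hypothetical Go sketch (not the compliance package's actual graph
+walker) of the bug class this testdata exercises: when several goroutines walk
+the shared graph above, a node whose walk has merely *started* must be waited
+on rather than treated as already *finished*. The `node` and `walker` types
+below are illustrative only.
+
+```go
+package main
+
+import (
+	"fmt"
+	"sync"
+)
+
+type node struct {
+	name string
+	deps []*node
+}
+
+type walker struct {
+	mu   sync.Mutex
+	done map[*node]chan struct{} // closed only after a node's walk has finished
+}
+
+func (w *walker) walk(n *node, visit func(*node)) {
+	w.mu.Lock()
+	if ch, started := w.done[n]; started {
+		w.mu.Unlock()
+		// Correct behavior: wait until the other goroutine finishes this node.
+		// Treating "already started" as "already finished" (returning without
+		// waiting) is the regression this testdata guards against.
+		<-ch
+		return
+	}
+	ch := make(chan struct{})
+	w.done[n] = ch
+	w.mu.Unlock()
+
+	var wg sync.WaitGroup
+	for _, d := range n.deps {
+		d := d
+		wg.Add(1)
+		go func() {
+			defer wg.Done()
+			w.walk(d, visit)
+		}()
+	}
+	wg.Wait()
+	visit(n) // all dependencies are fully resolved here
+	close(ch)
+}
+
+func main() {
+	// Rebuild the shape of the graph above: a chain of nine libraries with
+	// lib9 at the end, each bin linking one library, all inside a container.
+	libs := make([]*node, 10)
+	for i := 9; i >= 1; i-- {
+		libs[i] = &node{name: fmt.Sprintf("lib%d", i)}
+		if i < 9 {
+			libs[i].deps = []*node{libs[i+1]}
+		}
+	}
+	var bins []*node
+	for i := 1; i <= 9; i++ {
+		bins = append(bins, &node{name: fmt.Sprintf("bin%d", i), deps: []*node{libs[i]}})
+	}
+	container := &node{name: "container.zip", deps: bins}
+
+	w := &walker{done: map[*node]chan struct{}{}}
+	w.walk(container, func(n *node) { fmt.Println("resolved", n.name) })
+}
+```
+
+With this chain, a short walk starting at bin9 can reach lib9 while a long walk
+from bin1 is still resolving it; waiting on the started node keeps both results
+complete.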
diff --git a/tools/compliance/cmd/testdata/regressconcur/bin/bin1.meta_lic b/tools/compliance/cmd/testdata/regressconcur/bin/bin1.meta_lic
new file mode 100644
index 0000000..e3763f6
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/bin/bin1.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+module_classes: "EXECUTABLES"
+projects:  "bin/onelibrary"
+license_kinds:  "legacy_proprietary"
+license_conditions:  "proprietary"
+is_container:  false
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin1"
+installed:  "out/target/product/fictional/system/bin/bin1"
+sources:  "out/target/product/fictional/system/lib/lib1.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib1.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/bin/bin2.meta_lic b/tools/compliance/cmd/testdata/regressconcur/bin/bin2.meta_lic
new file mode 100644
index 0000000..e1822bb
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/bin/bin2.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+module_classes: "EXECUTABLES"
+projects:  "bin/onelibrary"
+license_kinds:  "legacy_proprietary"
+license_conditions:  "proprietary"
+is_container:  false
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin2"
+installed:  "out/target/product/fictional/system/bin/bin2"
+sources:  "out/target/product/fictional/system/lib/lib2.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib2.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/regressconcur/bin/bin3.meta_lic
new file mode 100644
index 0000000..35f5d74
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/bin/bin3.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+module_classes: "EXECUTABLES"
+projects:  "bin/onelibrary"
+license_kinds:  "legacy_proprietary"
+license_conditions:  "proprietary"
+is_container:  false
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
+installed:  "out/target/product/fictional/system/bin/bin3"
+sources:  "out/target/product/fictional/system/lib/lib3.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib3.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/bin/bin4.meta_lic b/tools/compliance/cmd/testdata/regressconcur/bin/bin4.meta_lic
new file mode 100644
index 0000000..3287382
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/bin/bin4.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+module_classes: "EXECUTABLES"
+projects:  "bin/onelibrary"
+license_kinds:  "legacy_proprietary"
+license_conditions:  "proprietary"
+is_container:  false
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin4"
+installed:  "out/target/product/fictional/system/bin/bin4"
+sources:  "out/target/product/fictional/system/lib/lib4.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib4.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/bin/bin5.meta_lic b/tools/compliance/cmd/testdata/regressconcur/bin/bin5.meta_lic
new file mode 100644
index 0000000..f51bcdb
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/bin/bin5.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+module_classes: "EXECUTABLES"
+projects:  "bin/onelibrary"
+license_kinds:  "legacy_proprietary"
+license_conditions:  "proprietary"
+is_container:  false
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin5"
+installed:  "out/target/product/fictional/system/bin/bin5"
+sources:  "out/target/product/fictional/system/lib/lib5.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib5.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/bin/bin6.meta_lic b/tools/compliance/cmd/testdata/regressconcur/bin/bin6.meta_lic
new file mode 100644
index 0000000..4c99b01
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/bin/bin6.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+module_classes: "EXECUTABLES"
+projects:  "bin/onelibrary"
+license_kinds:  "legacy_proprietary"
+license_conditions:  "proprietary"
+is_container:  false
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin6"
+installed:  "out/target/product/fictional/system/bin/bin6"
+sources:  "out/target/product/fictional/system/lib/lib6.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib6.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/bin/bin7.meta_lic b/tools/compliance/cmd/testdata/regressconcur/bin/bin7.meta_lic
new file mode 100644
index 0000000..565a338
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/bin/bin7.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+module_classes: "EXECUTABLES"
+projects:  "bin/onelibrary"
+license_kinds:  "legacy_proprietary"
+license_conditions:  "proprietary"
+is_container:  false
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin7"
+installed:  "out/target/product/fictional/system/bin/bin7"
+sources:  "out/target/product/fictional/system/lib/lib7.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib7.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/bin/bin8.meta_lic b/tools/compliance/cmd/testdata/regressconcur/bin/bin8.meta_lic
new file mode 100644
index 0000000..f66c2c9
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/bin/bin8.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+module_classes: "EXECUTABLES"
+projects:  "bin/onelibrary"
+license_kinds:  "legacy_proprietary"
+license_conditions:  "proprietary"
+is_container:  false
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin8"
+installed:  "out/target/product/fictional/system/bin/bin8"
+sources:  "out/target/product/fictional/system/lib/lib8.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib8.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/bin/bin9.meta_lic b/tools/compliance/cmd/testdata/regressconcur/bin/bin9.meta_lic
new file mode 100644
index 0000000..0aac4c6
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/bin/bin9.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+module_classes: "EXECUTABLES"
+projects:  "bin/onelibrary"
+license_kinds:  "legacy_proprietary"
+license_conditions:  "proprietary"
+is_container:  false
+built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin9"
+installed:  "out/target/product/fictional/system/bin/bin9"
+sources:  "out/target/product/fictional/system/lib/lib9.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib9.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/container.zip.meta_lic b/tools/compliance/cmd/testdata/regressconcur/container.zip.meta_lic
new file mode 100644
index 0000000..88683d4
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/container.zip.meta_lic
@@ -0,0 +1,60 @@
+package_name:  "Android"
+projects:  "container/zip"
+license_kinds:  "SPDX-license-identifier-MIT"
+license_conditions:  "notice"
+is_container:  true
+built:  "out/target/product/fictional/obj/ETC/container_intermediates/container.zip"
+installed:  "out/target/product/fictional/data/container.zip"
+install_map {
+  from_path:  "out/target/product/fictional/system/lib/"
+  container_path:  "/"
+}
+install_map {
+  from_path:  "out/target/product/fictional/system/bin/"
+  container_path:  "/"
+}
+sources:  "out/target/product/fictional/system/bin/bin1"
+sources:  "out/target/product/fictional/system/bin/bin2"
+sources:  "out/target/product/fictional/system/bin/bin3"
+sources:  "out/target/product/fictional/system/bin/bin4"
+sources:  "out/target/product/fictional/system/bin/bin5"
+sources:  "out/target/product/fictional/system/bin/bin6"
+sources:  "out/target/product/fictional/system/bin/bin7"
+sources:  "out/target/product/fictional/system/bin/bin8"
+sources:  "out/target/product/fictional/system/bin/bin9"
+deps:  {
+  file:  "testdata/regressconcur/bin/bin1.meta_lic"
+  annotations:  "static"
+}
+deps:  {
+  file:  "testdata/regressconcur/bin/bin2.meta_lic"
+  annotations:  "static"
+}
+deps:  {
+  file:  "testdata/regressconcur/bin/bin3.meta_lic"
+  annotations:  "static"
+}
+deps:  {
+  file:  "testdata/regressconcur/bin/bin4.meta_lic"
+  annotations:  "static"
+}
+deps:  {
+  file:  "testdata/regressconcur/bin/bin5.meta_lic"
+  annotations:  "static"
+}
+deps:  {
+  file:  "testdata/regressconcur/bin/bin6.meta_lic"
+  annotations:  "static"
+}
+deps:  {
+  file:  "testdata/regressconcur/bin/bin7.meta_lic"
+  annotations:  "static"
+}
+deps:  {
+  file:  "testdata/regressconcur/bin/bin8.meta_lic"
+  annotations:  "static"
+}
+deps:  {
+  file:  "testdata/regressconcur/bin/bin9.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/lib/lib1.a.meta_lic b/tools/compliance/cmd/testdata/regressconcur/lib/lib1.a.meta_lic
new file mode 100644
index 0000000..89ebd7c
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/lib/lib1.a.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+projects:  "lib/restricted"
+license_kinds:  "SPDX-license-identifier-MIT"
+license_conditions:  "notice"
+is_container:  false
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib1.so"
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib1.a"
+installed:  "out/target/product/fictional/system/lib/lib1.so"
+sources:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib2.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib2.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/lib/lib2.a.meta_lic b/tools/compliance/cmd/testdata/regressconcur/lib/lib2.a.meta_lic
new file mode 100644
index 0000000..ec84287
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/lib/lib2.a.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+projects:  "lib/restricted"
+license_kinds:  "SPDX-license-identifier-MIT"
+license_conditions:  "notice"
+is_container:  false
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib2.so"
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib2.a"
+installed:  "out/target/product/fictional/system/lib/lib2.so"
+sources:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib3.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib3.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/lib/lib3.a.meta_lic b/tools/compliance/cmd/testdata/regressconcur/lib/lib3.a.meta_lic
new file mode 100644
index 0000000..1949c06
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/lib/lib3.a.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+projects:  "lib/restricted"
+license_kinds:  "SPDX-license-identifier-MIT"
+license_conditions:  "notice"
+is_container:  false
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib3.so"
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib3.a"
+installed:  "out/target/product/fictional/system/lib/lib3.so"
+sources:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib4.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib4.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/lib/lib4.a.meta_lic b/tools/compliance/cmd/testdata/regressconcur/lib/lib4.a.meta_lic
new file mode 100644
index 0000000..4dc777b
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/lib/lib4.a.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+projects:  "lib/restricted"
+license_kinds:  "SPDX-license-identifier-MIT"
+license_conditions:  "notice"
+is_container:  false
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib4.so"
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib4.a"
+installed:  "out/target/product/fictional/system/lib/lib4.so"
+sources:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib5.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib5.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/lib/lib5.a.meta_lic b/tools/compliance/cmd/testdata/regressconcur/lib/lib5.a.meta_lic
new file mode 100644
index 0000000..5d005dc
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/lib/lib5.a.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+projects:  "lib/restricted"
+license_kinds:  "SPDX-license-identifier-MIT"
+license_conditions:  "notice"
+is_container:  false
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib5.so"
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib5.a"
+installed:  "out/target/product/fictional/system/lib/lib5.so"
+sources:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib6.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib6.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/lib/lib6.a.meta_lic b/tools/compliance/cmd/testdata/regressconcur/lib/lib6.a.meta_lic
new file mode 100644
index 0000000..f2920c3
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/lib/lib6.a.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+projects:  "lib/restricted"
+license_kinds:  "SPDX-license-identifier-MIT"
+license_conditions:  "notice"
+is_container:  false
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib6.so"
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib6.a"
+installed:  "out/target/product/fictional/system/lib/lib6.so"
+sources:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib7.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib7.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/lib/lib7.a.meta_lic b/tools/compliance/cmd/testdata/regressconcur/lib/lib7.a.meta_lic
new file mode 100644
index 0000000..28c0c5f
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/lib/lib7.a.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+projects:  "lib/restricted"
+license_kinds:  "SPDX-license-identifier-MIT"
+license_conditions:  "notice"
+is_container:  false
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib7.so"
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib7.a"
+installed:  "out/target/product/fictional/system/lib/lib7.so"
+sources:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib8.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib8.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/lib/lib8.a.meta_lic b/tools/compliance/cmd/testdata/regressconcur/lib/lib8.a.meta_lic
new file mode 100644
index 0000000..b887edc
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/lib/lib8.a.meta_lic
@@ -0,0 +1,13 @@
+package_name:  "Android"
+projects:  "lib/restricted"
+license_kinds:  "SPDX-license-identifier-MIT"
+license_conditions:  "notice"
+is_container:  false
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib8.so"
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib8.a"
+installed:  "out/target/product/fictional/system/lib/lib8.so"
+sources:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib9.a"
+deps:  {
+  file:  "testdata/regressconcur/lib/lib9.a.meta_lic"
+  annotations:  "static"
+}
diff --git a/tools/compliance/cmd/testdata/regressconcur/lib/lib9.a.meta_lic b/tools/compliance/cmd/testdata/regressconcur/lib/lib9.a.meta_lic
new file mode 100644
index 0000000..9bf155f
--- /dev/null
+++ b/tools/compliance/cmd/testdata/regressconcur/lib/lib9.a.meta_lic
@@ -0,0 +1,8 @@
+package_name:  "Android"
+projects:  "lib/restricted"
+license_kinds:  "SPDX-license-identifier-GPL-2.0"
+license_conditions:  "restricted"
+is_container:  false
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib9.so"
+built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/lib9.a"
+installed:  "out/target/product/fictional/system/lib/lib9.so"
diff --git a/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic b/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
index 7ef14e9..859be7f 100644
--- a/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/bin/bin3.meta_lic
@@ -2,7 +2,7 @@
 module_classes: "EXECUTABLES"
 projects:  "standalone/binary"
 license_kinds:  "SPDX-license-identifier-LGPL-2.0"
-license_conditions:  "restricted"
+license_conditions:  "restricted_if_statically_linked"
 license_texts:  "testdata/restricted/RESTRICTED_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/EXECUTABLES/bin_intermediates/bin3"
diff --git a/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic b/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
index a505d4a..ce5de6e 100644
--- a/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
+++ b/tools/compliance/cmd/testdata/restricted/lib/liba.so.meta_lic
@@ -1,7 +1,7 @@
 package_name:  "Device"
 projects:  "device/library"
 license_kinds:  "SPDX-license-identifier-LGPL-2.0"
-license_conditions:  "restricted"
+license_conditions:  "restricted_if_statically_linked"
 license_texts:  "testdata/restricted/RESTRICTED_LICENSE"
 is_container:  false
 built:  "out/target/product/fictional/obj/SHARED_LIBRARIES/lib_intermediates/liba.so"
diff --git a/tools/compliance/cmd/textnotice/textnotice.go b/tools/compliance/cmd/textnotice/textnotice.go
index 9beaf58..450290c 100644
--- a/tools/compliance/cmd/textnotice/textnotice.go
+++ b/tools/compliance/cmd/textnotice/textnotice.go
@@ -23,6 +23,7 @@
 	"io/fs"
 	"os"
 	"path/filepath"
+	"sort"
 	"strings"
 
 	"android/soong/response"
@@ -230,7 +231,8 @@
 		fmt.Fprintln(ctx.stdout)
 	}
 
-	*ctx.deps = ni.InputNoticeFiles()
+	*ctx.deps = ni.InputFiles()
+	sort.Strings(*ctx.deps)
 
 	return nil
 }
diff --git a/tools/compliance/cmd/textnotice/textnotice_test.go b/tools/compliance/cmd/textnotice/textnotice_test.go
index e661a44..a902313 100644
--- a/tools/compliance/cmd/textnotice/textnotice_test.go
+++ b/tools/compliance/cmd/textnotice/textnotice_test.go
@@ -65,7 +65,16 @@
 				usedBy{"highest.apex/lib/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -81,7 +90,16 @@
 				usedBy{"container.zip/libb.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/container.zip.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -93,7 +111,13 @@
 				usedBy{"application"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/application.meta_lic",
+				"testdata/firstparty/bin/bin3.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -105,7 +129,12 @@
 				usedBy{"bin/bin1"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -117,7 +146,10 @@
 				usedBy{"lib/libd.so"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "notice",
@@ -142,6 +174,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/highest.apex.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -167,6 +206,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/container.zip.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -186,6 +232,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/application.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -207,6 +257,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -219,7 +272,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "reciprocal",
@@ -244,6 +300,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/highest.apex.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -269,6 +332,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/container.zip.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -288,6 +358,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/application.meta_lic",
+				"testdata/reciprocal/bin/bin3.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -309,6 +383,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -323,6 +400,7 @@
 			},
 			expectedDeps: []string{
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -353,6 +431,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/highest.apex.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -383,6 +468,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/container.zip.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -402,6 +494,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/application.meta_lic",
+				"testdata/restricted/bin/bin3.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -426,6 +522,9 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -438,7 +537,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/restricted/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "proprietary",
@@ -468,6 +570,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/highest.apex.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -499,6 +608,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/container.zip.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -519,6 +635,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/application.meta_lic",
+				"testdata/proprietary/bin/bin3.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -540,6 +660,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -552,7 +675,10 @@
 				usedBy{"lib/libd.so"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/proprietary/lib/libd.so.meta_lic",
+			},
 		},
 	}
 	for _, tt := range tests {
diff --git a/tools/compliance/cmd/xmlnotice/xmlnotice.go b/tools/compliance/cmd/xmlnotice/xmlnotice.go
index 2097b7c..c3f8e4c 100644
--- a/tools/compliance/cmd/xmlnotice/xmlnotice.go
+++ b/tools/compliance/cmd/xmlnotice/xmlnotice.go
@@ -24,6 +24,7 @@
 	"io/fs"
 	"os"
 	"path/filepath"
+	"sort"
 	"strings"
 
 	"android/soong/response"
@@ -238,7 +239,8 @@
 	}
 	fmt.Fprintln(ctx.stdout, "</licenses>")
 
-	*ctx.deps = ni.InputNoticeFiles()
+	*ctx.deps = ni.InputFiles()
+	sort.Strings(*ctx.deps)
 
 	return nil
 }
diff --git a/tools/compliance/cmd/xmlnotice/xmlnotice_test.go b/tools/compliance/cmd/xmlnotice/xmlnotice_test.go
index 731e783..551006f 100644
--- a/tools/compliance/cmd/xmlnotice/xmlnotice_test.go
+++ b/tools/compliance/cmd/xmlnotice/xmlnotice_test.go
@@ -65,7 +65,16 @@
 				target{"highest.apex/lib/libb.so", "Android"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/highest.apex.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -79,7 +88,16 @@
 				target{"container.zip/libb.so", "Android"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/bin/bin2.meta_lic",
+				"testdata/firstparty/container.zip.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -89,7 +107,13 @@
 				target{"application", "Android"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/application.meta_lic",
+				"testdata/firstparty/bin/bin3.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libb.so.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -99,7 +123,12 @@
 				target{"bin/bin1", "Android"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/bin/bin1.meta_lic",
+				"testdata/firstparty/lib/liba.so.meta_lic",
+				"testdata/firstparty/lib/libc.a.meta_lic",
+			},
 		},
 		{
 			condition: "firstparty",
@@ -109,7 +138,10 @@
 				target{"lib/libd.so", "Android"},
 				firstParty{},
 			},
-			expectedDeps: []string{"testdata/firstparty/FIRST_PARTY_LICENSE"},
+			expectedDeps: []string{
+				"testdata/firstparty/FIRST_PARTY_LICENSE",
+				"testdata/firstparty/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "notice",
@@ -129,6 +161,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/highest.apex.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -149,6 +188,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/bin/bin2.meta_lic",
+				"testdata/notice/container.zip.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
+				"testdata/notice/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -164,6 +210,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/application.meta_lic",
+				"testdata/notice/bin/bin3.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -180,6 +230,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/bin/bin1.meta_lic",
+				"testdata/notice/lib/liba.so.meta_lic",
+				"testdata/notice/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -190,7 +243,10 @@
 				target{"lib/libd.so", "External"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/notice/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "reciprocal",
@@ -210,6 +266,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/highest.apex.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -230,6 +293,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/bin/bin2.meta_lic",
+				"testdata/reciprocal/container.zip.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -245,6 +315,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/application.meta_lic",
+				"testdata/reciprocal/bin/bin3.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -261,6 +335,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
+				"testdata/reciprocal/bin/bin1.meta_lic",
+				"testdata/reciprocal/lib/liba.so.meta_lic",
+				"testdata/reciprocal/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -271,7 +348,10 @@
 				target{"lib/libd.so", "External"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/reciprocal/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "restricted",
@@ -294,6 +374,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/highest.apex.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -317,6 +404,13 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/bin/bin2.meta_lic",
+				"testdata/restricted/container.zip.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
+				"testdata/restricted/lib/libd.so.meta_lic",
 			},
 		},
 		{
@@ -332,6 +426,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/application.meta_lic",
+				"testdata/restricted/bin/bin3.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -350,6 +448,9 @@
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/reciprocal/RECIPROCAL_LICENSE",
 				"testdata/restricted/RESTRICTED_LICENSE",
+				"testdata/restricted/bin/bin1.meta_lic",
+				"testdata/restricted/lib/liba.so.meta_lic",
+				"testdata/restricted/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -360,7 +461,10 @@
 				target{"lib/libd.so", "External"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/restricted/lib/libd.so.meta_lic",
+			},
 		},
 		{
 			condition: "proprietary",
@@ -382,6 +486,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/highest.apex.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -405,6 +516,13 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/bin/bin2.meta_lic",
+				"testdata/proprietary/container.zip.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
+				"testdata/proprietary/lib/libd.so.meta_lic",
 				"testdata/restricted/RESTRICTED_LICENSE",
 			},
 		},
@@ -421,6 +539,10 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/application.meta_lic",
+				"testdata/proprietary/bin/bin3.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libb.so.meta_lic",
 			},
 		},
 		{
@@ -437,6 +559,9 @@
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
 				"testdata/proprietary/PROPRIETARY_LICENSE",
+				"testdata/proprietary/bin/bin1.meta_lic",
+				"testdata/proprietary/lib/liba.so.meta_lic",
+				"testdata/proprietary/lib/libc.a.meta_lic",
 			},
 		},
 		{
@@ -447,7 +572,10 @@
 				target{"lib/libd.so", "External"},
 				notice{},
 			},
-			expectedDeps: []string{"testdata/notice/NOTICE_LICENSE"},
+			expectedDeps: []string{
+				"testdata/notice/NOTICE_LICENSE",
+				"testdata/proprietary/lib/libd.so.meta_lic",
+			},
 		},
 	}
 	for _, tt := range tests {
diff --git a/tools/compliance/condition.go b/tools/compliance/condition.go
index 3145249..2aac78c 100644
--- a/tools/compliance/condition.go
+++ b/tools/compliance/condition.go
@@ -58,15 +58,15 @@
 var (
 	// RecognizedConditionNames maps condition strings to LicenseCondition.
 	RecognizedConditionNames = map[string]LicenseCondition{
-		"unencumbered":                        UnencumberedCondition,
-		"permissive":                          PermissiveCondition,
-		"notice":                              NoticeCondition,
-		"reciprocal":                          ReciprocalCondition,
-		"restricted":                          RestrictedCondition,
-		"restricted_allows_dynamic_linking":   WeaklyRestrictedCondition,
-		"proprietary":                         ProprietaryCondition,
-		"by_exception_only":                   ByExceptionOnlyCondition,
-		"not_allowed":                         NotAllowedCondition,
+		"unencumbered":                    UnencumberedCondition,
+		"permissive":                      PermissiveCondition,
+		"notice":                          NoticeCondition,
+		"reciprocal":                      ReciprocalCondition,
+		"restricted":                      RestrictedCondition,
+		"restricted_if_statically_linked": WeaklyRestrictedCondition,
+		"proprietary":                     ProprietaryCondition,
+		"by_exception_only":               ByExceptionOnlyCondition,
+		"not_allowed":                     NotAllowedCondition,
 	}
 )
 
@@ -84,7 +84,7 @@
 	case RestrictedCondition:
 		return "restricted"
 	case WeaklyRestrictedCondition:
-		return "restricted_allows_dynamic_linking"
+		return "restricted_if_statically_linked"
 	case ProprietaryCondition:
 		return "proprietary"
 	case ByExceptionOnlyCondition:
diff --git a/tools/compliance/conditionset_test.go b/tools/compliance/conditionset_test.go
index 020cc0c..e31360d 100644
--- a/tools/compliance/conditionset_test.go
+++ b/tools/compliance/conditionset_test.go
@@ -96,18 +96,18 @@
 		{
 			name:       "everything",
 			conditions: []string{"unencumbered", "permissive", "notice", "reciprocal", "restricted", "proprietary"},
-			plus:       &[]string{"restricted_allows_dynamic_linking", "by_exception_only", "not_allowed"},
+			plus:       &[]string{"restricted_if_statically_linked", "by_exception_only", "not_allowed"},
 			matchingAny: map[string][]string{
-				"unencumbered":                        []string{"unencumbered"},
-				"permissive":                          []string{"permissive"},
-				"notice":                              []string{"notice"},
-				"reciprocal":                          []string{"reciprocal"},
-				"restricted":                          []string{"restricted"},
-				"restricted_allows_dynamic_linking":   []string{"restricted_allows_dynamic_linking"},
-				"proprietary":                         []string{"proprietary"},
-				"by_exception_only":                   []string{"by_exception_only"},
-				"not_allowed":                         []string{"not_allowed"},
-				"notice|proprietary":                  []string{"notice", "proprietary"},
+				"unencumbered":                    []string{"unencumbered"},
+				"permissive":                      []string{"permissive"},
+				"notice":                          []string{"notice"},
+				"reciprocal":                      []string{"reciprocal"},
+				"restricted":                      []string{"restricted"},
+				"restricted_if_statically_linked": []string{"restricted_if_statically_linked"},
+				"proprietary":                     []string{"proprietary"},
+				"by_exception_only":               []string{"by_exception_only"},
+				"not_allowed":                     []string{"not_allowed"},
+				"notice|proprietary":              []string{"notice", "proprietary"},
 			},
 			expected: []string{
 				"unencumbered",
@@ -115,7 +115,7 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_allows_dynamic_linking",
+				"restricted_if_statically_linked",
 				"proprietary",
 				"by_exception_only",
 				"not_allowed",
@@ -129,7 +129,7 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_allows_dynamic_linking",
+				"restricted_if_statically_linked",
 				"proprietary",
 				"by_exception_only",
 				"not_allowed",
@@ -148,7 +148,7 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_allows_dynamic_linking",
+				"restricted_if_statically_linked",
 				"proprietary",
 				"by_exception_only",
 				"not_allowed",
@@ -157,18 +157,18 @@
 		{
 			name:       "allbutone",
 			conditions: []string{"unencumbered", "permissive", "notice", "reciprocal", "restricted", "proprietary"},
-			plus:       &[]string{"restricted_allows_dynamic_linking", "by_exception_only", "not_allowed"},
+			plus:       &[]string{"restricted_if_statically_linked", "by_exception_only", "not_allowed"},
 			matchingAny: map[string][]string{
-				"unencumbered":                        []string{"unencumbered"},
-				"permissive":                          []string{"permissive"},
-				"notice":                              []string{"notice"},
-				"reciprocal":                          []string{"reciprocal"},
-				"restricted":                          []string{"restricted"},
-				"restricted_allows_dynamic_linking":   []string{"restricted_allows_dynamic_linking"},
-				"proprietary":                         []string{"proprietary"},
-				"by_exception_only":                   []string{"by_exception_only"},
-				"not_allowed":                         []string{"not_allowed"},
-				"notice|proprietary":                  []string{"notice", "proprietary"},
+				"unencumbered":                    []string{"unencumbered"},
+				"permissive":                      []string{"permissive"},
+				"notice":                          []string{"notice"},
+				"reciprocal":                      []string{"reciprocal"},
+				"restricted":                      []string{"restricted"},
+				"restricted_if_statically_linked": []string{"restricted_if_statically_linked"},
+				"proprietary":                     []string{"proprietary"},
+				"by_exception_only":               []string{"by_exception_only"},
+				"not_allowed":                     []string{"not_allowed"},
+				"notice|proprietary":              []string{"notice", "proprietary"},
 			},
 			expected: []string{
 				"unencumbered",
@@ -176,7 +176,7 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_allows_dynamic_linking",
+				"restricted_if_statically_linked",
 				"proprietary",
 				"by_exception_only",
 				"not_allowed",
@@ -190,23 +190,23 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_allows_dynamic_linking",
+				"restricted_if_statically_linked",
 				"proprietary",
 				"by_exception_only",
 				"not_allowed",
 			},
-			minus: &[]string{"restricted_allows_dynamic_linking"},
+			minus: &[]string{"restricted_if_statically_linked"},
 			matchingAny: map[string][]string{
-				"unencumbered":                        []string{"unencumbered"},
-				"permissive":                          []string{"permissive"},
-				"notice":                              []string{"notice"},
-				"reciprocal":                          []string{"reciprocal"},
-				"restricted":                          []string{"restricted"},
-				"restricted_allows_dynamic_linking":   []string{},
-				"proprietary":                         []string{"proprietary"},
-				"by_exception_only":                   []string{"by_exception_only"},
-				"not_allowed":                         []string{"not_allowed"},
-				"restricted|proprietary":              []string{"restricted", "proprietary"},
+				"unencumbered":                    []string{"unencumbered"},
+				"permissive":                      []string{"permissive"},
+				"notice":                          []string{"notice"},
+				"reciprocal":                      []string{"reciprocal"},
+				"restricted":                      []string{"restricted"},
+				"restricted_if_statically_linked": []string{},
+				"proprietary":                     []string{"proprietary"},
+				"by_exception_only":               []string{"by_exception_only"},
+				"not_allowed":                     []string{"not_allowed"},
+				"restricted|proprietary":          []string{"restricted", "proprietary"},
 			},
 			expected: []string{
 				"unencumbered",
@@ -227,7 +227,7 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_allows_dynamic_linking",
+				"restricted_if_statically_linked",
 				"proprietary",
 				"by_exception_only",
 				"not_allowed",
@@ -238,41 +238,41 @@
 				"notice",
 				"reciprocal",
 				"restricted",
-				"restricted_allows_dynamic_linking",
+				"restricted_if_statically_linked",
 				"proprietary",
 				"by_exception_only",
 				"not_allowed",
 			},
 			matchingAny: map[string][]string{
-				"unencumbered":                        []string{},
-				"permissive":                          []string{},
-				"notice":                              []string{},
-				"reciprocal":                          []string{},
-				"restricted":                          []string{},
-				"restricted_allows_dynamic_linking":   []string{},
-				"proprietary":                         []string{},
-				"by_exception_only":                   []string{},
-				"not_allowed":                         []string{},
-				"restricted|proprietary":              []string{},
+				"unencumbered":                    []string{},
+				"permissive":                      []string{},
+				"notice":                          []string{},
+				"reciprocal":                      []string{},
+				"restricted":                      []string{},
+				"restricted_if_statically_linked": []string{},
+				"proprietary":                     []string{},
+				"by_exception_only":               []string{},
+				"not_allowed":                     []string{},
+				"restricted|proprietary":          []string{},
 			},
 			expected: []string{},
 		},
 		{
 			name:       "restrictedplus",
-			conditions: []string{"restricted", "restricted_allows_dynamic_linking"},
+			conditions: []string{"restricted", "restricted_if_statically_linked"},
 			plus:       &[]string{"permissive", "notice", "restricted", "proprietary"},
 			matchingAny: map[string][]string{
-				"unencumbered":                        []string{},
-				"permissive":                          []string{"permissive"},
-				"notice":                              []string{"notice"},
-				"restricted":                          []string{"restricted"},
-				"restricted_allows_dynamic_linking":   []string{"restricted_allows_dynamic_linking"},
-				"proprietary":                         []string{"proprietary"},
-				"restricted|proprietary":              []string{"restricted", "proprietary"},
-				"by_exception_only":                   []string{},
-				"proprietary|by_exception_only":       []string{"proprietary"},
+				"unencumbered":                    []string{},
+				"permissive":                      []string{"permissive"},
+				"notice":                          []string{"notice"},
+				"restricted":                      []string{"restricted"},
+				"restricted_if_statically_linked": []string{"restricted_if_statically_linked"},
+				"proprietary":                     []string{"proprietary"},
+				"restricted|proprietary":          []string{"restricted", "proprietary"},
+				"by_exception_only":               []string{},
+				"proprietary|by_exception_only":   []string{"proprietary"},
 			},
-			expected: []string{"permissive", "notice", "restricted", "restricted_allows_dynamic_linking", "proprietary"},
+			expected: []string{"permissive", "notice", "restricted", "restricted_if_statically_linked", "proprietary"},
 		},
 	}
 	for _, tt := range tests {
diff --git a/tools/compliance/doc.go b/tools/compliance/doc.go
index a47c1cf..5ced9ee 100644
--- a/tools/compliance/doc.go
+++ b/tools/compliance/doc.go
@@ -11,6 +11,10 @@
 // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 // See the License for the specific language governing permissions and
 // limitations under the License.
+
+// Much of this content also appears in README.md.
+// When changing this file, consider whether the change also applies to README.md.
+
 /*
 
 Package compliance provides an approved means for reading, consuming, and
@@ -31,6 +35,13 @@
 artifacts in a release or distribution. While conceptually immutable, parts of
 the graph may be loaded or evaluated lazily.
 
+Conceptually, the graph itself will always be a directed acyclic graph. One
+representation is a set of directed edges. Another is a set of nodes with
+directed edges to their dependencies.
+
+The edges have annotations, which can distinguish between build tools, runtime
+dependencies, and dependencies such as 'contains' that make the target a
+derivative work of the dependency.
+
 LicenseCondition
 ----------------
 
@@ -51,17 +62,13 @@
 
 `ActsOn` is the target to share, give notice for, hide etc.
 
-`Resolves` is the license condition that the action resolves.
+`Resolves` is the set of condition types that the action resolves.
 
-Remember: Each license condition pairs a condition name with an originating
-target so each resolution in a ResolutionSet has two targets it applies to and
-one target from which it originates, all of which may be the same target.
-
-For most condition types, `ActsOn` and `Resolves.Origin` will be the same
-target. For example, a notice condition policy means attribution or notice must
-be given for the target where the condition originates. Likewise, a proprietary
-condition policy means the privacy of the target where the condition originates
-must be respected. i.e. The thing acted on is the origin.
+For most condition types, `ActsOn` will be the target where the condition
+originated. For example, a notice condition policy means attribution or notice
+must be given for the target where the condition originates. Likewise, a
+proprietary condition policy means the privacy of the target where the
+condition originates must be respected; i.e. the thing acted on is the origin.
 
 Restricted conditions are different. The infectious nature of restricted often
 means sharing code that is not the target where the restricted condition
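The doc.go text above describes the license graph as a DAG that can be viewed either as a flat set of directed edges or as nodes owning edges to their dependencies, with annotations on each edge. A minimal, self-contained Go sketch of the two representations follows; the types, target names, and annotation strings are illustrative stand-ins, not the package's own.
```
package main

import "fmt"

// edge is one directed "target depends on dependency" relationship.
type edge struct {
	target, dependency string
	annotations        []string // illustrative annotation strings
}

// node owns its outgoing edges, mirroring the nodes-with-edges representation.
type node struct {
	name  string
	edges []edge
}

func main() {
	// Representation 1: a flat set of directed edges.
	edgeList := []edge{
		{"bin.meta_lic", "lib.meta_lic", []string{"static"}},
		{"bin.meta_lic", "toolchain.meta_lic", []string{"toolchain"}},
	}

	// Representation 2: nodes with directed edges to their dependencies,
	// built here from the same edge list.
	nodes := map[string]*node{}
	for _, e := range edgeList {
		if nodes[e.target] == nil {
			nodes[e.target] = &node{name: e.target}
		}
		nodes[e.target].edges = append(nodes[e.target].edges, e)
	}

	for name, n := range nodes {
		fmt.Printf("%s -> %d dependencies\n", name, len(n.edges))
	}
}
```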
diff --git a/tools/compliance/graph.go b/tools/compliance/graph.go
index e73ab46..9ad319b 100644
--- a/tools/compliance/graph.go
+++ b/tools/compliance/graph.go
@@ -58,13 +58,11 @@
 	/// (guarded by mu)
 	targets map[string]*TargetNode
 
-	// wgBU becomes non-nil when the bottom-up resolve begins and reaches 0
-	// (i.e. Wait() proceeds) when the bottom-up resolve completes. (guarded by mu)
-	wgBU *sync.WaitGroup
+	// onceBottomUp makes sure the bottom-up resolve walk happens only once.
+	onceBottomUp sync.Once
 
-	// wgTD becomes non-nil when the top-down resolve begins and reaches 0 (i.e. Wait()
-	// proceeds) when the top-down resolve completes. (guarded by mu)
-	wgTD *sync.WaitGroup
+	// onceTopDown makes sure the top-down resolve walk happens only once.
+	onceTopDown sync.Once
 
 	// shippedNodes caches the results of a full walk of nodes identifying targets
 	// distributed either directly or as derivative works. (creation guarded by mu)
@@ -90,6 +88,15 @@
 	return targets
 }
 
+// TargetNames returns the list of target node names in the graph. (unordered)
+func (lg *LicenseGraph) TargetNames() []string {
+	targets := make([]string, 0, len(lg.targets))
+	for target := range lg.targets {
+		targets = append(targets, target)
+	}
+	return targets
+}
+
 // compliance-only LicenseGraph methods
 
 // newLicenseGraph constructs a new, empty instance of LicenseGraph.
@@ -139,6 +146,24 @@
 	return e.annotations
 }
 
+// IsRuntimeDependency returns true for edges representing shared libraries
+// linked dynamically at runtime.
+func (e *TargetEdge) IsRuntimeDependency() bool {
+	return edgeIsDynamicLink(e)
+}
+
+// IsDerivation returns true for edges where the target is a derivative
+// work of the dependency.
+func (e *TargetEdge) IsDerivation() bool {
+	return edgeIsDerivation(e)
+}
+
+// IsBuildTool returns true for edges where the target is built
+// by the dependency.
+func (e *TargetEdge) IsBuildTool() bool {
+	return !edgeIsDerivation(e) && !edgeIsDynamicLink(e)
+}
+
 // String returns a human-readable string representation of the edge.
 func (e *TargetEdge) String() string {
 	return fmt.Sprintf("%s -[%s]> %s", e.target.name, strings.Join(e.annotations.AsList(), ", "), e.dependency.name)
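The new IsRuntimeDependency/IsDerivation/IsBuildTool accessors added above split edges into three kinds, with "build tool" defined as neither of the other two. Below is a self-contained sketch of that shape using assumed annotation names ("dynamic" for runtime linking, "static" for derivation); the real rules live in edgeIsDynamicLink/edgeIsDerivation in the package's policy code, and the real predicates are independent booleans rather than a single classification.
```
package main

import "fmt"

type edgeKind int

const (
	runtimeDependency edgeKind = iota // linked dynamically at runtime
	derivation                        // dependency becomes part of a derivative work
	buildTool                         // dependency only builds the target
)

// classify collapses the three predicates into one result for illustration:
// a "build tool" edge is simply one that is neither a dynamic link nor a derivation.
func classify(annotations map[string]bool) edgeKind {
	switch {
	case annotations["dynamic"]: // assumed runtime-link marker
		return runtimeDependency
	case annotations["static"]: // assumed derivative-work marker
		return derivation
	default:
		return buildTool
	}
}

func main() {
	fmt.Println(classify(map[string]bool{"dynamic": true})) // 0 (runtime dependency)
	fmt.Println(classify(map[string]bool{"static": true}))  // 1 (derivation)
	fmt.Println(classify(map[string]bool{}))                // 2 (build tool)
}
```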
@@ -188,6 +213,11 @@
 	return s.edge.dependency
 }
 
+// Edge describes the target edge.
+func (s TargetEdgePathSegment) Edge() *TargetEdge {
+	return s.edge
+}
+
 // Annotations describes the type of edge by the set of annotations attached to
 // it.
 //
@@ -300,21 +330,9 @@
 	return tn.proto.GetPackageName()
 }
 
-// ModuleTypes returns the list of module types implementing the target.
-// (unordered)
-//
-// In an ideal world, only 1 module type would implement each target, but the
-// interactions between Soong and Make for host versus product and for a
-// variety of architectures sometimes causes multiple module types per target
-// (often a regular build target and a prebuilt.)
-func (tn *TargetNode) ModuleTypes() []string {
-	return append([]string{}, tn.proto.ModuleTypes...)
-}
-
-// ModuleClasses returns the list of module classes implementing the target.
-// (unordered)
-func (tn *TargetNode) ModuleClasses() []string {
-	return append([]string{}, tn.proto.ModuleClasses...)
+// ModuleName returns the module name of the target.
+func (tn *TargetNode) ModuleName() string {
+	return tn.proto.GetModuleName()
 }
 
 // Projects returns the projects defining the target node. (unordered)
@@ -326,14 +344,6 @@
 	return append([]string{}, tn.proto.Projects...)
 }
 
-// LicenseKinds returns the list of license kind names for the module or
-// target. (unordered)
-//
-// e.g. SPDX-license-identifier-MIT or legacy_proprietary
-func (tn *TargetNode) LicenseKinds() []string {
-	return append([]string{}, tn.proto.LicenseKinds...)
-}
-
 // LicenseConditions returns a copy of the set of license conditions
 // originating at the target. The values that appear and how each is resolved
 // is a matter of policy. (unordered)
@@ -458,36 +468,25 @@
 }
 
 // TargetNodeSet describes a set of distinct nodes in a license graph.
-type TargetNodeSet struct {
-	nodes map[*TargetNode]struct{}
-}
+type TargetNodeSet map[*TargetNode]struct{}
 
 // Contains returns true when `target` is an element of the set.
-func (ts *TargetNodeSet) Contains(target *TargetNode) bool {
-	_, isPresent := ts.nodes[target]
+func (ts TargetNodeSet) Contains(target *TargetNode) bool {
+	_, isPresent := ts[target]
 	return isPresent
 }
 
-// AsList returns the list of target nodes in the set. (unordered)
-func (ts *TargetNodeSet) AsList() TargetNodeList {
-	result := make(TargetNodeList, 0, len(ts.nodes))
-	for tn := range ts.nodes {
-		result = append(result, tn)
-	}
-	return result
-}
-
 // Names returns the array of target node names in the set. (unordered)
-func (ts *TargetNodeSet) Names() []string {
-	result := make([]string, 0, len(ts.nodes))
-	for tn := range ts.nodes {
+func (ts TargetNodeSet) Names() []string {
+	result := make([]string, 0, len(ts))
+	for tn := range ts {
 		result = append(result, tn.name)
 	}
 	return result
 }
 
 // String returns a human-readable string representation of the set.
-func (ts *TargetNodeSet) String() string {
+func (ts TargetNodeSet) String() string {
 	return fmt.Sprintf("{%s}", strings.Join(ts.Names(), ", "))
 }
 
diff --git a/tools/compliance/noticeindex.go b/tools/compliance/noticeindex.go
index 86d42ac..c91a8df 100644
--- a/tools/compliance/noticeindex.go
+++ b/tools/compliance/noticeindex.go
@@ -15,7 +15,6 @@
 package compliance
 
 import (
-	"bufio"
 	"crypto/md5"
 	"fmt"
 	"io"
@@ -25,16 +24,11 @@
 	"regexp"
 	"sort"
 	"strings"
-)
 
-const (
-	noProjectName = "\u2205"
+	"android/soong/tools/compliance/projectmetadata"
 )
 
 var (
-	nameRegexp         = regexp.MustCompile(`^\s*name\s*:\s*"(.*)"\s*$`)
-	descRegexp         = regexp.MustCompile(`^\s*description\s*:\s*"(.*)"\s*$`)
-	versionRegexp      = regexp.MustCompile(`^\s*version\s*:\s*"(.*)"\s*$`)
 	licensesPathRegexp = regexp.MustCompile(`licen[cs]es?/`)
 )
 
@@ -43,10 +37,12 @@
 type NoticeIndex struct {
 	// lg identifies the license graph to which the index applies.
 	lg *LicenseGraph
+	// pmix indexes project metadata
+	pmix *projectmetadata.Index
 	// rs identifies the set of resolutions upon which the index is based.
 	rs ResolutionSet
 	// shipped identifies the set of target nodes shipped directly or as derivative works.
-	shipped *TargetNodeSet
+	shipped TargetNodeSet
 	// rootFS locates the root of the file system from which to read the files.
 	rootFS fs.FS
 	// hash maps license text filenames to content hashes
@@ -75,6 +71,7 @@
 	}
 	ni := &NoticeIndex{
 		lg:             lg,
+		pmix:           projectmetadata.NewIndex(rootFS),
 		rs:             rs,
 		shipped:        ShippedNodes(lg),
 		rootFS:         rootFS,
@@ -110,9 +107,12 @@
 		return hashes, nil
 	}
 
-	link := func(tn *TargetNode, hashes map[hash]struct{}, installPaths []string) {
+	link := func(tn *TargetNode, hashes map[hash]struct{}, installPaths []string) error {
 		for h := range hashes {
-			libName := ni.getLibName(tn, h)
+			libName, err := ni.getLibName(tn, h)
+			if err != nil {
+				return err
+			}
 			if _, ok := ni.libHash[libName]; !ok {
 				ni.libHash[libName] = make(map[hash]struct{})
 			}
@@ -145,6 +145,11 @@
 				}
 			}
 		}
+		return nil
+	}
+
+	cacheMetadata := func(tn *TargetNode) {
+		ni.pmix.MetadataForProjects(tn.Projects()...)
 	}
 
 	// returns error from walk below.
@@ -157,13 +162,17 @@
 		if !ni.shipped.Contains(tn) {
 			return false
 		}
+		go cacheMetadata(tn)
 		installPaths := getInstallPaths(tn, path)
 		var hashes map[hash]struct{}
 		hashes, err = index(tn)
 		if err != nil {
 			return false
 		}
-		link(tn, hashes, installPaths)
+		err = link(tn, hashes, installPaths)
+		if err != nil {
+			return false
+		}
 		if tn.IsContainer() {
 			return true
 		}
@@ -173,7 +182,10 @@
 			if err != nil {
 				return false
 			}
-			link(r.actsOn, hashes, installPaths)
+			err = link(r.actsOn, hashes, installPaths)
+			if err != nil {
+				return false
+			}
 		}
 		return false
 	})
@@ -214,12 +226,18 @@
 		close(c)
 	}()
 	return c
 }
 
-// InputNoticeFiles returns the list of files that were hashed during IndexLicenseTexts.
-func (ni *NoticeIndex) InputNoticeFiles() []string {
-	files := append([]string(nil), ni.files...)
-	sort.Strings(files)
+// InputFiles returns the complete list of files read during indexing.
+func (ni *NoticeIndex) InputFiles() []string {
+	projectMeta := ni.pmix.AllMetadataFiles()
+	files := make([]string, 0, len(ni.files) + len(ni.lg.targets) + len(projectMeta))
+	files = append(files, ni.files...)
+	for f := range ni.lg.targets {
+		files = append(files, f)
+	}
+	files = append(files, projectMeta...)
 	return files
 }
 
@@ -308,15 +326,18 @@
 }
 
 // getLibName returns the name of the library associated with `noticeFor`.
-func (ni *NoticeIndex) getLibName(noticeFor *TargetNode, h hash) string {
+func (ni *NoticeIndex) getLibName(noticeFor *TargetNode, h hash) (string, error) {
 	for _, text := range noticeFor.LicenseTexts() {
 		if !strings.Contains(text, ":") {
 			if ni.hash[text].key != h.key {
 				continue
 			}
-			ln := ni.checkMetadataForLicenseText(noticeFor, text)
+			ln, err := ni.checkMetadataForLicenseText(noticeFor, text)
+			if err != nil {
+				return "", err
+			}
 			if len(ln) > 0 {
-				return ln
+				return ln, nil
 			}
 			continue
 		}
@@ -331,17 +352,20 @@
 		if err != nil {
 			continue
 		}
-		return ln
+		return ln, nil
 	}
 	// use name from METADATA if available
-	ln := ni.checkMetadata(noticeFor)
+	ln, err := ni.checkMetadata(noticeFor)
+	if err != nil {
+		return "", err
+	}
 	if len(ln) > 0 {
-		return ln
+		return ln, nil
 	}
 	// use package_name: from license{} module if available
 	pn := noticeFor.PackageName()
 	if len(pn) > 0 {
-		return pn
+		return pn, nil
 	}
 	for _, p := range noticeFor.Projects() {
 		if strings.HasPrefix(p, "prebuilts/") {
@@ -385,7 +409,7 @@
 							match = match[:li]
 						}
 					}
-					return match
+					return match, nil
 				}
 				break
 			}
@@ -393,9 +417,9 @@
 		for _, safePathPrefix := range safePathPrefixes {
 			if strings.HasPrefix(p, safePathPrefix.prefix) {
 				if safePathPrefix.strip {
-					return p[len(safePathPrefix.prefix):]
+					return p[len(safePathPrefix.prefix):], nil
 				} else {
-					return p
+					return p, nil
 				}
 			}
 		}
@@ -410,35 +434,26 @@
 	if fi > 0 {
 		n = n[:fi]
 	}
-	return n
+	return n, nil
 }
 
 // checkMetadata tries to look up a library name from a METADATA file associated with `noticeFor`.
-func (ni *NoticeIndex) checkMetadata(noticeFor *TargetNode) string {
-	for _, p := range noticeFor.Projects() {
-		if name, ok := ni.projectName[p]; ok {
-			if name == noProjectName {
-				continue
-			}
-			return name
-		}
-		name, err := ni.checkMetadataFile(filepath.Join(p, "METADATA"))
-		if err != nil {
-			ni.projectName[p] = noProjectName
-			continue
-		}
-		if len(name) == 0 {
-			ni.projectName[p] = noProjectName
-			continue
-		}
-		ni.projectName[p] = name
-		return name
+func (ni *NoticeIndex) checkMetadata(noticeFor *TargetNode) (string, error) {
+	pms, err := ni.pmix.MetadataForProjects(noticeFor.Projects()...)
+	if err != nil {
+		return "", err
 	}
-	return ""
+	for _, pm := range pms {
+		name := pm.VersionedName()
+		if name != "" {
+			return name, nil
+		}
+	}
+	return "", nil
 }
 
 // checkMetadataForLicenseText
-func (ni *NoticeIndex) checkMetadataForLicenseText(noticeFor *TargetNode, licenseText string) string {
+func (ni *NoticeIndex) checkMetadataForLicenseText(noticeFor *TargetNode, licenseText string) (string, error) {
 	p := ""
 	for _, proj := range noticeFor.Projects() {
 		if strings.HasPrefix(licenseText, proj) {
@@ -456,79 +471,17 @@
 				p = filepath.Dir(p)
 				continue
 			}
-			return ""
+			return "", nil
 		}
 	}
-	if name, ok := ni.projectName[p]; ok {
-		if name == noProjectName {
-			return ""
-		}
-		return name
-	}
-	name, err := ni.checkMetadataFile(filepath.Join(p, "METADATA"))
-	if err == nil && len(name) > 0 {
-		ni.projectName[p] = name
-		return name
-	}
-	ni.projectName[p] = noProjectName
-	return ""
-}
-
-// checkMetadataFile tries to look up a library name from a METADATA file at `path`.
-func (ni *NoticeIndex) checkMetadataFile(path string) (string, error) {
-	f, err := ni.rootFS.Open(path)
+	pms, err := ni.pmix.MetadataForProjects(p)
 	if err != nil {
 		return "", err
 	}
-	name := ""
-	description := ""
-	version := ""
-	s := bufio.NewScanner(f)
-	for s.Scan() {
-		line := s.Text()
-		m := nameRegexp.FindStringSubmatch(line)
-		if m != nil {
-			if 1 < len(m) && m[1] != "" {
-				name = m[1]
-			}
-			if version != "" {
-				break
-			}
-			continue
-		}
-		m = versionRegexp.FindStringSubmatch(line)
-		if m != nil {
-			if 1 < len(m) && m[1] != "" {
-				version = m[1]
-			}
-			if name != "" {
-				break
-			}
-			continue
-		}
-		m = descRegexp.FindStringSubmatch(line)
-		if m != nil {
-			if 1 < len(m) && m[1] != "" {
-				description = m[1]
-			}
-		}
+	if pms == nil {
+		return "", nil
 	}
-	_ = s.Err()
-	_ = f.Close()
-	if name != "" {
-		if version != "" {
-			if version[0] == 'v' || version[0] == 'V' {
-				return name + "_" + version, nil
-			} else {
-				return name + "_v_" + version, nil
-			}
-		}
-		return name, nil
-	}
-	if description != "" {
-		return description, nil
-	}
-	return "", nil
+	return pms[0].VersionedName(), nil
 }
 
 // addText reads and indexes the content of a license text file.
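The removed checkMetadataFile scanned METADATA files by hand and composed a versioned library name; the new code delegates that to projectmetadata.Index and VersionedName(). The sketch below replays the naming rule visible in the removed code, with parsing simplified to already-extracted fields; whether projectmetadata's VersionedName formats identically is an assumption.
```
package main

import "fmt"

// versionedName mirrors the rule in the removed checkMetadataFile: append the
// version with "_v_" unless it already starts with 'v' or 'V', and fall back
// to the description when no name is present.
func versionedName(name, version, description string) string {
	if name != "" {
		if version != "" {
			if version[0] == 'v' || version[0] == 'V' {
				return name + "_" + version
			}
			return name + "_v_" + version
		}
		return name
	}
	return description
}

func main() {
	fmt.Println(versionedName("zlib", "1.2.13", ""))            // zlib_v_1.2.13
	fmt.Println(versionedName("zlib", "v1.2.13", ""))           // zlib_v1.2.13
	fmt.Println(versionedName("", "", "A compression library")) // A compression library
}
```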
diff --git a/tools/compliance/policy_policy.go b/tools/compliance/policy_policy.go
index 02d1d96..368a162 100644
--- a/tools/compliance/policy_policy.go
+++ b/tools/compliance/policy_policy.go
@@ -114,35 +114,9 @@
 
 // LicenseConditionSetFromNames returns a set containing the recognized `names`,
 // silently ignoring any unrecognized `names`.
-func LicenseConditionSetFromNames(tn *TargetNode, names ...string) LicenseConditionSet {
+func LicenseConditionSetFromNames(names ...string) LicenseConditionSet {
 	cs := NewLicenseConditionSet()
 	for _, name := range names {
-		if name == "restricted" {
-			if 0 == len(tn.LicenseKinds()) {
-				cs = cs.Plus(RestrictedCondition)
-				continue
-			}
-			hasLgpl := false
-			hasGeneric := false
-			for _, kind := range tn.LicenseKinds() {
-				if anyLgpl.MatchString(kind) {
-					cs = cs.Plus(WeaklyRestrictedCondition)
-					hasLgpl = true
-				} else if versionedGpl.MatchString(kind) {
-					cs = cs.Plus(RestrictedCondition)
-				} else if genericGpl.MatchString(kind) {
-					hasGeneric = true
-				} else if kind == "legacy_restricted" || ccBySa.MatchString(kind) {
-					cs = cs.Plus(RestrictedCondition)
-				} else {
-					cs = cs.Plus(RestrictedCondition)
-				}
-			}
-			if hasGeneric && !hasLgpl {
-				cs = cs.Plus(RestrictedCondition)
-			}
-			continue
-		}
 		if lc, ok := RecognizedConditionNames[name]; ok {
 			cs |= LicenseConditionSet(lc)
 		}
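After dropping the per-target license-kind special cases above, LicenseConditionSetFromNames reduces to a name lookup OR'd into a bitset, silently skipping unrecognized names. A self-contained sketch of that pattern follows; the condition names and bit values are illustrative, not the package's actual constants.
```
package main

import "fmt"

type conditionSet uint

const (
	notice conditionSet = 1 << iota
	reciprocal
	restricted
	restrictedIfStaticallyLinked
)

var recognized = map[string]conditionSet{
	"notice":                          notice,
	"reciprocal":                      reciprocal,
	"restricted":                      restricted,
	"restricted_if_statically_linked": restrictedIfStaticallyLinked,
}

// fromNames mirrors the simplified shape: a lookup and a bitwise OR, with no
// inspection of the target's license kinds.
func fromNames(names ...string) conditionSet {
	var cs conditionSet
	for _, name := range names {
		if c, ok := recognized[name]; ok {
			cs |= c
		}
	}
	return cs
}

func main() {
	cs := fromNames("notice", "restricted", "unknown_name")
	fmt.Printf("%04b\n", cs) // 0101: notice|restricted; "unknown_name" ignored
}
```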
diff --git a/tools/compliance/policy_policy_test.go b/tools/compliance/policy_policy_test.go
index 6188eb2..f003314 100644
--- a/tools/compliance/policy_policy_test.go
+++ b/tools/compliance/policy_policy_test.go
@@ -49,8 +49,8 @@
 			name: "fponlgpl",
 			edge: annotated{"apacheBin.meta_lic", "lgplLib.meta_lic", []string{"static"}},
 			expectedDepActions: []string{
-				"apacheBin.meta_lic:lgplLib.meta_lic:restricted_allows_dynamic_linking",
-				"lgplLib.meta_lic:lgplLib.meta_lic:restricted_allows_dynamic_linking",
+				"apacheBin.meta_lic:lgplLib.meta_lic:restricted_if_statically_linked",
+				"lgplLib.meta_lic:lgplLib.meta_lic:restricted_if_statically_linked",
 			},
 			expectedTargetConditions: []string{},
 		},
@@ -85,15 +85,15 @@
 			expectedTargetConditions: []string{},
 		},
 		{
-			name: "independentmodulestatic",
-			edge: annotated{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
-			expectedDepActions: []string{},
+			name:                     "independentmodulestatic",
+			edge:                     annotated{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
+			expectedDepActions:       []string{},
 			expectedTargetConditions: []string{},
 		},
 		{
-			name: "dependentmodule",
-			edge: annotated{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
-			expectedDepActions: []string{},
+			name:                     "dependentmodule",
+			edge:                     annotated{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
+			expectedDepActions:       []string{},
 			expectedTargetConditions: []string{},
 		},
 
@@ -101,7 +101,7 @@
 			name:                     "lgplonfp",
 			edge:                     annotated{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			expectedDepActions:       []string{},
-			expectedTargetConditions: []string{"lgplBin.meta_lic:restricted_allows_dynamic_linking"},
+			expectedTargetConditions: []string{"lgplBin.meta_lic:restricted_if_statically_linked"},
 		},
 		{
 			name:                     "lgplonfpdynamic",
diff --git a/tools/compliance/policy_resolve.go b/tools/compliance/policy_resolve.go
index 93335a9..0951ba1 100644
--- a/tools/compliance/policy_resolve.go
+++ b/tools/compliance/policy_resolve.go
@@ -14,10 +14,6 @@
 
 package compliance
 
-import (
-	"sync"
-)
-
 var (
 	// AllResolutions is a TraceConditions function that resolves all
 	// unfiltered license conditions.
@@ -49,84 +45,57 @@
 func TraceBottomUpConditions(lg *LicenseGraph, conditionsFn TraceConditions) {
 
 	// short-cut if already walked and cached
-	lg.mu.Lock()
-	wg := lg.wgBU
+	lg.onceBottomUp.Do(func() {
+		// amap identifies targets previously walked.
+		amap := make(map[*TargetNode]struct{})
 
-	if wg != nil {
-		lg.mu.Unlock()
-		wg.Wait()
-		return
-	}
-	wg = &sync.WaitGroup{}
-	wg.Add(1)
-	lg.wgBU = wg
-	lg.mu.Unlock()
+		var walk func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet
 
-	// amap identifes targets previously walked. (guarded by mu)
-	amap := make(map[*TargetNode]struct{})
-
-	// mu guards concurrent access to amap
-	var mu sync.Mutex
-
-	var walk func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet
-
-	walk = func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet {
-		priorWalkResults := func() (LicenseConditionSet, bool) {
-			mu.Lock()
-			defer mu.Unlock()
-
-			if _, alreadyWalked := amap[target]; alreadyWalked {
-				if treatAsAggregate {
-					return target.resolution, true
+		walk = func(target *TargetNode, treatAsAggregate bool) LicenseConditionSet {
+			priorWalkResults := func() (LicenseConditionSet, bool) {
+				if _, alreadyWalked := amap[target]; alreadyWalked {
+					if treatAsAggregate {
+						return target.resolution, true
+					}
+					if !target.pure {
+						return target.resolution, true
+					}
+					// previously walked in a pure aggregate context,
+					// needs to walk again in non-aggregate context
+				} else {
+					target.resolution |= conditionsFn(target)
+					amap[target] = struct{}{}
 				}
-				if !target.pure {
-					return target.resolution, true
-				}
-				// previously walked in a pure aggregate context,
-				// needs to walk again in non-aggregate context
-			} else {
-				target.resolution |= conditionsFn(target)
-				amap[target] = struct{}{}
+				target.pure = treatAsAggregate
+				return target.resolution, false
 			}
-			target.pure = treatAsAggregate
-			return target.resolution, false
-		}
-		cs, alreadyWalked := priorWalkResults()
-		if alreadyWalked {
+			cs, alreadyWalked := priorWalkResults()
+			if alreadyWalked {
+				return cs
+			}
+
+			// add all the conditions from all the dependencies
+			for _, edge := range target.edges {
+				// walk dependency to get its conditions
+				dcs := walk(edge.dependency, treatAsAggregate && edge.dependency.IsContainer())
+
+				// turn those into the conditions that apply to the target
+				dcs = depConditionsPropagatingToTarget(lg, edge, dcs, treatAsAggregate)
+				cs |= dcs
+			}
+			target.resolution |= cs
+			cs = target.resolution
+
+			// return conditions up the tree
 			return cs
 		}
 
-		c := make(chan LicenseConditionSet, len(target.edges))
-		// add all the conditions from all the dependencies
-		for _, edge := range target.edges {
-			go func(edge *TargetEdge) {
-				// walk dependency to get its conditions
-				cs := walk(edge.dependency, treatAsAggregate && edge.dependency.IsContainer())
-
-				// turn those into the conditions that apply to the target
-				cs = depConditionsPropagatingToTarget(lg, edge, cs, treatAsAggregate)
-
-				c <- cs
-			}(edge)
+		// walk each of the roots
+		for _, rname := range lg.rootFiles {
+			rnode := lg.targets[rname]
+			_ = walk(rnode, rnode.IsContainer())
 		}
-		for i := 0; i < len(target.edges); i++ {
-			cs |= <-c
-		}
-		mu.Lock()
-		target.resolution |= cs
-		mu.Unlock()
-
-		// return conditions up the tree
-		return cs
-	}
-
-	// walk each of the roots
-	for _, rname := range lg.rootFiles {
-		rnode := lg.targets[rname]
-		_ = walk(rnode, rnode.IsContainer())
-	}
-
-	wg.Done()
+	})
 }
 
 // ResolveTopDownCondtions performs a top-down walk of the LicenseGraph
@@ -145,78 +114,61 @@
 func TraceTopDownConditions(lg *LicenseGraph, conditionsFn TraceConditions) {
 
 	// short-cut if already walked and cached
-	lg.mu.Lock()
-	wg := lg.wgTD
+	lg.onceTopDown.Do(func() {
+		// start with the conditions propagated up the graph
+		TraceBottomUpConditions(lg, conditionsFn)
 
-	if wg != nil {
-		lg.mu.Unlock()
-		wg.Wait()
-		return
-	}
-	wg = &sync.WaitGroup{}
-	wg.Add(1)
-	lg.wgTD = wg
-	lg.mu.Unlock()
+		// amap contains the set of targets already walked.
+		amap := make(map[*TargetNode]struct{})
 
-	// start with the conditions propagated up the graph
-	TraceBottomUpConditions(lg, conditionsFn)
+		var walk func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool)
 
-	// amap contains the set of targets already walked. (guarded by mu)
-	amap := make(map[*TargetNode]struct{})
-
-	// mu guards concurrent access to amap
-	var mu sync.Mutex
-
-	var walk func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool)
-
-	walk = func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool) {
-		defer wg.Done()
-		mu.Lock()
-		fnode.resolution |= conditionsFn(fnode)
-		fnode.resolution |= cs
-		fnode.pure = treatAsAggregate
-		amap[fnode] = struct{}{}
-		cs = fnode.resolution
-		mu.Unlock()
-		// for each dependency
-		for _, edge := range fnode.edges {
-			func(edge *TargetEdge) {
-				// dcs holds the dpendency conditions inherited from the target
-				dcs := targetConditionsPropagatingToDep(lg, edge, cs, treatAsAggregate, conditionsFn)
-				dnode := edge.dependency
-				mu.Lock()
-				defer mu.Unlock()
-				depcs := dnode.resolution
-				_, alreadyWalked := amap[dnode]
-				if !dcs.IsEmpty() && alreadyWalked {
-					if dcs.Difference(depcs).IsEmpty() {
+		walk = func(fnode *TargetNode, cs LicenseConditionSet, treatAsAggregate bool) {
+			continueWalk := func() bool {
+				if _, alreadyWalked := amap[fnode]; alreadyWalked {
+					if cs.IsEmpty() {
+						return false
+					}
+					if cs.Difference(fnode.resolution).IsEmpty() {
 						// no new conditions
 
 						// pure aggregates never need walking a 2nd time with same conditions
 						if treatAsAggregate {
-							return
+							return false
 						}
 						// non-aggregates don't need walking as non-aggregate a 2nd time
-						if !dnode.pure {
-							return
+						if !fnode.pure {
+							return false
 						}
 						// previously walked as pure aggregate; need to re-walk as non-aggregate
 					}
+				} else {
+					fnode.resolution |= conditionsFn(fnode)
 				}
+				fnode.resolution |= cs
+				fnode.pure = treatAsAggregate
+				amap[fnode] = struct{}{}
+				cs = fnode.resolution
+				return true
+			}()
+			if !continueWalk {
+				return
+			}
+			// for each dependency
+			for _, edge := range fnode.edges {
+				// dcs holds the dependency conditions inherited from the target
+				dcs := targetConditionsPropagatingToDep(lg, edge, cs, treatAsAggregate, conditionsFn)
+				dnode := edge.dependency
 				// add the conditions to the dependency
-				wg.Add(1)
-				go walk(dnode, dcs, treatAsAggregate && dnode.IsContainer())
-			}(edge)
+				walk(dnode, dcs, treatAsAggregate && dnode.IsContainer())
+			}
 		}
-	}
 
-	// walk each of the roots
-	for _, rname := range lg.rootFiles {
-		rnode := lg.targets[rname]
-		wg.Add(1)
-		// add the conditions to the root and its transitive closure
-		go walk(rnode, NewLicenseConditionSet(), rnode.IsContainer())
-	}
-	wg.Done()
-	wg.Wait()
+		// walk each of the roots
+		for _, rname := range lg.rootFiles {
+			rnode := lg.targets[rname]
+			// add the conditions to the root and its transitive closure
+			walk(rnode, NewLicenseConditionSet(), rnode.IsContainer())
+		}
+	})
 }
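The resolve walks above replace the WaitGroup/mutex handshake with a sync.Once per graph, so the walk body runs at most once even with concurrent callers, and later calls simply observe the cached result. A minimal sketch of that caching pattern, with stand-in types rather than the package's:
```
package main

import (
	"fmt"
	"sync"
)

type graph struct {
	onceBottomUp sync.Once
	resolution   int // stands in for the cached per-node condition sets
}

// traceBottomUp runs the expensive walk at most once; concurrent callers block
// inside Do until the first walk finishes, then all return the cached result.
// sync.Once also provides the happens-before edge that makes the later read safe.
func (g *graph) traceBottomUp() int {
	g.onceBottomUp.Do(func() {
		// the depth-first resolve walk would go here
		g.resolution = 42
	})
	return g.resolution
}

func main() {
	g := &graph{}
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			fmt.Println(g.traceBottomUp())
		}()
	}
	wg.Wait()
}
```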
diff --git a/tools/compliance/policy_resolve_test.go b/tools/compliance/policy_resolve_test.go
index d6731fe..f9ea6a1 100644
--- a/tools/compliance/policy_resolve_test.go
+++ b/tools/compliance/policy_resolve_test.go
@@ -204,8 +204,8 @@
 				{"apacheBin.meta_lic", "lgplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []tcond{
-				{"apacheBin.meta_lic", "notice|restricted_allows_dynamic_linking"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"apacheBin.meta_lic", "notice|restricted_if_statically_linked"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -216,7 +216,7 @@
 			},
 			expectedActions: []tcond{
 				{"apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -227,9 +227,9 @@
 				{"apacheBin.meta_lic", "lgplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []tcond{
-				{"apacheContainer.meta_lic", "notice|restricted_allows_dynamic_linking"},
-				{"apacheBin.meta_lic", "notice|restricted_allows_dynamic_linking"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"apacheContainer.meta_lic", "notice|restricted_if_statically_linked"},
+				{"apacheBin.meta_lic", "notice|restricted_if_statically_linked"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -240,9 +240,9 @@
 				{"apacheContainer.meta_lic", "lgplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []tcond{
-				{"apacheContainer.meta_lic", "notice|restricted_allows_dynamic_linking"},
+				{"apacheContainer.meta_lic", "notice|restricted_if_statically_linked"},
 				{"apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -253,7 +253,7 @@
 			},
 			expectedActions: []tcond{
 				{"apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -266,7 +266,7 @@
 			expectedActions: []tcond{
 				{"apacheContainer.meta_lic", "notice"},
 				{"apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -279,7 +279,7 @@
 			expectedActions: []tcond{
 				{"apacheContainer.meta_lic", "notice"},
 				{"apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -500,9 +500,9 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []tcond{
-				{"apacheBin.meta_lic", "notice|restricted_allows_dynamic_linking"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
-				{"mitLib.meta_lic", "notice|restricted_allows_dynamic_linking"},
+				{"apacheBin.meta_lic", "notice|restricted_if_statically_linked"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
+				{"mitLib.meta_lic", "notice|restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -514,7 +514,7 @@
 			},
 			expectedActions: []tcond{
 				{"apacheBin.meta_lic", "notice"},
-				{"lgplBin.meta_lic", "restricted_allows_dynamic_linking"},
+				{"lgplBin.meta_lic", "restricted_if_statically_linked"},
 				{"mitLib.meta_lic", "notice"},
 			},
 		},
@@ -527,10 +527,10 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []tcond{
-				{"apacheContainer.meta_lic", "notice|restricted_allows_dynamic_linking"},
-				{"apacheBin.meta_lic", "notice|restricted_allows_dynamic_linking"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
-				{"mitLib.meta_lic", "notice|restricted_allows_dynamic_linking"},
+				{"apacheContainer.meta_lic", "notice|restricted_if_statically_linked"},
+				{"apacheBin.meta_lic", "notice|restricted_if_statically_linked"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
+				{"mitLib.meta_lic", "notice|restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -541,9 +541,9 @@
 				{"apacheContainer.meta_lic", "lgplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []tcond{
-				{"apacheContainer.meta_lic", "notice|restricted_allows_dynamic_linking"},
+				{"apacheContainer.meta_lic", "notice|restricted_if_statically_linked"},
 				{"apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -555,7 +555,7 @@
 			},
 			expectedActions: []tcond{
 				{"apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 				{"mitLib.meta_lic", "notice"},
 			},
 		},
@@ -569,7 +569,7 @@
 			expectedActions: []tcond{
 				{"apacheContainer.meta_lic", "notice"},
 				{"apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -582,7 +582,7 @@
 			expectedActions: []tcond{
 				{"apacheContainer.meta_lic", "notice"},
 				{"apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "restricted_allows_dynamic_linking"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
diff --git a/tools/compliance/policy_resolvenotices_test.go b/tools/compliance/policy_resolvenotices_test.go
index ee5e3b8..b23e587 100644
--- a/tools/compliance/policy_resolvenotices_test.go
+++ b/tools/compliance/policy_resolvenotices_test.go
@@ -33,8 +33,8 @@
 				{"apacheBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -44,7 +44,7 @@
 				{"apacheBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
 			},
 		},
 		{
@@ -54,8 +54,8 @@
 				{"apacheBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -66,11 +66,9 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice|restricted"},
+				{"apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "notice|restricted"},
 			},
 		},
 		{
@@ -81,8 +79,8 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -96,22 +94,17 @@
 				{"mitBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "mitBin.meta_lic", "mitBin.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "mplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "mplLib.meta_lic", "mplLib.meta_lic", "reciprocal"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "mplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "mplLib.meta_lic", "mplLib.meta_lic", "reciprocal"},
-				{"mitBin.meta_lic", "mitBin.meta_lic", "mitBin.meta_lic", "notice"},
-				{"mitBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice|restricted"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "notice|restricted"},
+				{"apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "mitBin.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "mitLib.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "mplLib.meta_lic", "reciprocal|restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice|restricted"},
+				{"apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "mplLib.meta_lic", "reciprocal|restricted"},
+				{"mitBin.meta_lic", "mitBin.meta_lic", "notice"},
+				{"mitBin.meta_lic", "mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -122,12 +115,11 @@
 				{"apacheContainer.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"gplLib.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice|restricted"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -138,8 +130,8 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -150,11 +142,9 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"mitLib.meta_lic", "mitLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"mitLib.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice|restricted"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "restricted"},
+				{"mitLib.meta_lic", "mitLib.meta_lic", "notice|restricted"},
 			},
 		},
 		{
@@ -168,14 +158,11 @@
 				{"mitBin.meta_lic", "mitLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "mitBin.meta_lic", "mitBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"mitBin.meta_lic", "mitBin.meta_lic", "mitBin.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice|restricted"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "notice|restricted"},
+				{"apacheContainer.meta_lic", "mitBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice|restricted"},
+				{"mitBin.meta_lic", "mitBin.meta_lic", "notice"},
 			},
 		},
 		{
@@ -186,10 +173,9 @@
 				{"apacheContainer.meta_lic", "gplLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice|restricted"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
 			},
 		},
 		{
@@ -200,12 +186,11 @@
 				{"apacheContainer.meta_lic", "gplLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"gplLib.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice|restricted"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -216,11 +201,9 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice|restricted_if_statically_linked"},
+				{"apacheBin.meta_lic", "lgplLib.meta_lic", "restricted_if_statically_linked"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "notice|restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -231,8 +214,8 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -243,9 +226,9 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "notice"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -257,18 +240,13 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice|restricted_if_statically_linked"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "notice|restricted_if_statically_linked"},
+				{"apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted_if_statically_linked"},
+				{"apacheContainer.meta_lic", "mitLib.meta_lic", "notice|restricted_if_statically_linked"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice|restricted_if_statically_linked"},
+				{"apacheBin.meta_lic", "lgplLib.meta_lic", "restricted_if_statically_linked"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "notice|restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -279,12 +257,11 @@
 				{"apacheContainer.meta_lic", "lgplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice|restricted_if_statically_linked"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "lgplLib.meta_lic", "restricted_if_statically_linked"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -295,8 +272,8 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -307,9 +284,9 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "notice"},
+				{"lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -320,9 +297,9 @@
 				{"apacheBin.meta_lic", "lgplLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
 			},
 		},
 		{
@@ -333,10 +310,10 @@
 				{"apacheBin.meta_lic", "lgplLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -347,9 +324,9 @@
 				{"apacheContainer.meta_lic", "lgplLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
 			},
 		},
 		{
@@ -360,10 +337,10 @@
 				{"apacheContainer.meta_lic", "lgplLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"lgplLib.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"lgplLib.meta_lic", "lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -374,9 +351,9 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -387,9 +364,9 @@
 				{"dependentModule.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
-				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
-				{"dependentModule.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
+				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
+				{"dependentModule.meta_lic", "mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -400,8 +377,8 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -412,9 +389,9 @@
 				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "mitLib.meta_lic", "notice"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -425,8 +402,8 @@
 				{"dependentModule.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
-				{"dependentModule.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
+				{"dependentModule.meta_lic", "mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -437,9 +414,9 @@
 				{"dependentModule.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
-				{"dependentModule.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
+				{"dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
+				{"dependentModule.meta_lic", "mitLib.meta_lic", "notice"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 	}
diff --git a/tools/compliance/policy_resolveprivacy_test.go b/tools/compliance/policy_resolveprivacy_test.go
index e8c953a..d4d1967 100644
--- a/tools/compliance/policy_resolveprivacy_test.go
+++ b/tools/compliance/policy_resolveprivacy_test.go
@@ -57,7 +57,7 @@
 				{"proprietary.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"proprietary.meta_lic", "proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
+				{"proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
 			},
 		},
 		{
@@ -67,7 +67,7 @@
 				{"gplBin.meta_lic", "proprietary.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
+				{"gplBin.meta_lic", "proprietary.meta_lic", "proprietary"},
 			},
 		},
 	}
diff --git a/tools/compliance/policy_resolveshare_test.go b/tools/compliance/policy_resolveshare_test.go
index d49dfa8..4abd960 100644
--- a/tools/compliance/policy_resolveshare_test.go
+++ b/tools/compliance/policy_resolveshare_test.go
@@ -73,8 +73,8 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
-				{"lgplBin.meta_lic", "apacheLib.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_if_statically_linked"},
+				{"lgplBin.meta_lic", "apacheLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -84,7 +84,7 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -94,7 +94,7 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -104,8 +104,8 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"gplBin.meta_lic", "apacheLib.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "apacheLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -115,9 +115,9 @@
 				{"gplContainer.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplContainer.meta_lic", "gplContainer.meta_lic", "gplContainer.meta_lic", "restricted"},
-				{"gplContainer.meta_lic", "apacheLib.meta_lic", "gplContainer.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "gplContainer.meta_lic", "restricted"},
+				{"gplContainer.meta_lic", "gplContainer.meta_lic", "restricted"},
+				{"gplContainer.meta_lic", "apacheLib.meta_lic", "restricted"},
+				{"apacheLib.meta_lic", "apacheLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -128,9 +128,9 @@
 				{"apacheContainer.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"gplLib.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -141,9 +141,9 @@
 				{"apacheBin.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "apacheLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -153,7 +153,7 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -163,9 +163,9 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"gplBin.meta_lic", "apacheLib.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "apacheLib.meta_lic", "restricted"},
+				{"apacheLib.meta_lic", "apacheLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -207,8 +207,8 @@
 				{"proprietary.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"proprietary.meta_lic", "proprietary.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"proprietary.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"proprietary.meta_lic", "proprietary.meta_lic", "restricted"},
+				{"proprietary.meta_lic", "gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -218,8 +218,8 @@
 				{"gplBin.meta_lic", "proprietary.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"gplBin.meta_lic", "proprietary.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "proprietary.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -245,7 +245,7 @@
 				{"mitBin.meta_lic", "mplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"mitBin.meta_lic", "mplLib.meta_lic", "mplLib.meta_lic", "reciprocal"},
+				{"mitBin.meta_lic", "mplLib.meta_lic", "reciprocal"},
 			},
 		},
 		{
@@ -255,7 +255,7 @@
 				{"mplBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"mplBin.meta_lic", "mplBin.meta_lic", "mplBin.meta_lic", "reciprocal"},
+				{"mplBin.meta_lic", "mplBin.meta_lic", "reciprocal"},
 			},
 		},
 	}
diff --git a/tools/compliance/policy_shipped.go b/tools/compliance/policy_shipped.go
index 75c8399..b21a95a 100644
--- a/tools/compliance/policy_shipped.go
+++ b/tools/compliance/policy_shipped.go
@@ -16,15 +16,15 @@
 
 // ShippedNodes returns the set of nodes in a license graph where the target or
 // a derivative work gets distributed. (caches result)
-func ShippedNodes(lg *LicenseGraph) *TargetNodeSet {
+func ShippedNodes(lg *LicenseGraph) TargetNodeSet {
 	lg.mu.Lock()
 	shipped := lg.shippedNodes
 	lg.mu.Unlock()
 	if shipped != nil {
-		return shipped
+		return *shipped
 	}
 
-	tset := make(map[*TargetNode]struct{})
+	tset := make(TargetNodeSet)
 
 	WalkTopDown(NoEdgeContext{}, lg, func(lg *LicenseGraph, tn *TargetNode, path TargetEdgePath) bool {
 		if _, alreadyWalked := tset[tn]; alreadyWalked {
@@ -39,7 +39,7 @@
 		return true
 	})
 
-	shipped = &TargetNodeSet{tset}
+	shipped = &tset
 
 	lg.mu.Lock()
 	if lg.shippedNodes == nil {
@@ -50,5 +50,5 @@
 	}
 	lg.mu.Unlock()
 
-	return shipped
+	return *shipped
 }
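The `policy_shipped.go` change above switches `ShippedNodes` from returning `*TargetNodeSet` to returning the map-backed `TargetNodeSet` by value, so callers can range over it directly. Below is a minimal, self-contained sketch of that idea; the graph and node types are simplified stand-ins for the real `compliance` package, and only the return-by-value caching pattern is meant to match the diff.
```
// Illustrative sketch only; not the real compliance package.
package main

import "fmt"

type TargetNode struct{ name string }

// TargetNodeSet models a set of target nodes as a map, as in the diff above.
type TargetNodeSet map[*TargetNode]struct{}

type LicenseGraph struct {
	targets      []*TargetNode
	shippedNodes *TargetNodeSet // cached result
}

// ShippedNodes returns the cached set by value; the map data itself is shared,
// so the copy is cheap and callers can range over the result directly.
func ShippedNodes(lg *LicenseGraph) TargetNodeSet {
	if lg.shippedNodes != nil {
		return *lg.shippedNodes
	}
	tset := make(TargetNodeSet)
	for _, tn := range lg.targets { // stand-in for the real top-down walk
		tset[tn] = struct{}{}
	}
	lg.shippedNodes = &tset
	return *lg.shippedNodes
}

func main() {
	lg := &LicenseGraph{targets: []*TargetNode{{name: "bin"}, {name: "lib"}}}
	for tn := range ShippedNodes(lg) { // no pointer dereference needed
		fmt.Println("shipped:", tn.name)
	}
}
```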
diff --git a/tools/compliance/policy_walk.go b/tools/compliance/policy_walk.go
index beb6d53..e6b94ab 100644
--- a/tools/compliance/policy_walk.go
+++ b/tools/compliance/policy_walk.go
@@ -247,40 +247,16 @@
 // WalkActionsForCondition performs a top-down walk of the LicenseGraph
 // resolving all distributed works for `conditions`.
 func WalkActionsForCondition(lg *LicenseGraph, conditions LicenseConditionSet) ActionSet {
-	shipped := ShippedNodes(lg)
-
-	// cmap identifies previously walked target/condition pairs.
-	cmap := make(map[resolutionKey]struct{})
-
 	// amap maps 'actsOn' targets to the applicable conditions
 	//
 	// amap is the resulting ActionSet
 	amap := make(ActionSet)
-	WalkTopDown(ApplicableConditionsContext{conditions}, lg, func(lg *LicenseGraph, tn *TargetNode, path TargetEdgePath) bool {
-		universe := conditions
-		if len(path) > 0 {
-			universe = path[len(path)-1].ctx.(LicenseConditionSet)
+
+	for tn := range ShippedNodes(lg) {
+		if cs := conditions.Intersection(tn.resolution); !cs.IsEmpty() {
+			amap[tn] = cs
 		}
-		if universe.IsEmpty() {
-			return false
-		}
-		key := resolutionKey{tn, universe}
-		if _, ok := cmap[key]; ok {
-			return false
-		}
-		if !shipped.Contains(tn) {
-			return false
-		}
-		cs := universe.Intersection(tn.resolution)
-		if !cs.IsEmpty() {
-			if _, ok := amap[tn]; ok {
-				amap[tn] = cs
-			} else {
-				amap[tn] = amap[tn].Union(cs)
-			}
-		}
-		return true
-	})
+	}
 
 	return amap
 }
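The rewritten `WalkActionsForCondition` above drops the guarded top-down walk and simply intersects each shipped node's resolved conditions with the requested conditions. The sketch below illustrates that pattern with stand-in types; the real `LicenseConditionSet` and `TargetNode` differ in detail, so treat the names and bit values here as assumptions.
```
// Illustrative sketch of the intersect-over-shipped-nodes pattern above.
package main

import "fmt"

type LicenseConditionSet uint64 // illustrative bitset of condition flags

func (cs LicenseConditionSet) Intersection(other LicenseConditionSet) LicenseConditionSet {
	return cs & other
}
func (cs LicenseConditionSet) IsEmpty() bool { return cs == 0 }

type TargetNode struct {
	name       string
	resolution LicenseConditionSet // conditions already resolved onto the node
}

type ActionSet map[*TargetNode]LicenseConditionSet

const (
	NoticeCondition     LicenseConditionSet = 1 << iota // 0x1
	RestrictedCondition                                 // 0x2
)

// walkActionsForCondition keeps only the shipped nodes whose resolved
// conditions overlap the requested `conditions`.
func walkActionsForCondition(shipped []*TargetNode, conditions LicenseConditionSet) ActionSet {
	amap := make(ActionSet)
	for _, tn := range shipped {
		if cs := conditions.Intersection(tn.resolution); !cs.IsEmpty() {
			amap[tn] = cs
		}
	}
	return amap
}

func main() {
	bin := &TargetNode{name: "bin", resolution: NoticeCondition | RestrictedCondition}
	lib := &TargetNode{name: "lib", resolution: NoticeCondition}
	for tn, cs := range walkActionsForCondition([]*TargetNode{bin, lib}, RestrictedCondition) {
		fmt.Printf("%s -> %#x\n", tn.name, uint64(cs)) // only "bin" survives
	}
}
```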
diff --git a/tools/compliance/policy_walk_test.go b/tools/compliance/policy_walk_test.go
index 0bc37f8..53af3be 100644
--- a/tools/compliance/policy_walk_test.go
+++ b/tools/compliance/policy_walk_test.go
@@ -48,8 +48,8 @@
 				{"apacheBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -60,8 +60,8 @@
 				{"mitBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"mitBin.meta_lic", "mitBin.meta_lic", "mitBin.meta_lic", "notice"},
-				{"mitBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"mitBin.meta_lic", "mitBin.meta_lic", "notice"},
+				{"mitBin.meta_lic", "mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -72,9 +72,8 @@
 				{"apacheBin.meta_lic", "lgplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice|restricted_if_statically_linked"},
+				{"apacheBin.meta_lic", "lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -85,7 +84,7 @@
 				{"apacheBin.meta_lic", "lgplLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
 			},
 		},
 		{
@@ -96,7 +95,7 @@
 				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
 			},
 		},
 		{
@@ -116,8 +115,8 @@
 				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -137,7 +136,7 @@
 				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
+				{"dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
 			},
 		},
 		{
@@ -157,9 +156,8 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
-				{"lgplBin.meta_lic", "apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"lgplBin.meta_lic", "apacheLib.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_if_statically_linked"},
+				{"lgplBin.meta_lic", "apacheLib.meta_lic", "notice|restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -170,8 +168,8 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
-				{"lgplBin.meta_lic", "apacheLib.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_if_statically_linked"},
+				{"lgplBin.meta_lic", "apacheLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -182,7 +180,7 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -193,7 +191,7 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -204,9 +202,8 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"gplBin.meta_lic", "apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"gplBin.meta_lic", "apacheLib.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "apacheLib.meta_lic", "notice|restricted"},
 			},
 		},
 		{
@@ -217,8 +214,8 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"gplBin.meta_lic", "apacheLib.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "apacheLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -229,11 +226,9 @@
 				{"gplContainer.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplContainer.meta_lic", "gplContainer.meta_lic", "gplContainer.meta_lic", "restricted"},
-				{"gplContainer.meta_lic", "apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"gplContainer.meta_lic", "apacheLib.meta_lic", "gplContainer.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "gplContainer.meta_lic", "restricted"},
+				{"gplContainer.meta_lic", "gplContainer.meta_lic", "restricted"},
+				{"gplContainer.meta_lic", "apacheLib.meta_lic", "notice|restricted"},
+				{"apacheLib.meta_lic", "apacheLib.meta_lic", "notice|restricted"},
 			},
 		},
 		{
@@ -244,9 +239,9 @@
 				{"gplContainer.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplContainer.meta_lic", "gplContainer.meta_lic", "gplContainer.meta_lic", "restricted"},
-				{"gplContainer.meta_lic", "apacheLib.meta_lic", "gplContainer.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "gplContainer.meta_lic", "restricted"},
+				{"gplContainer.meta_lic", "gplContainer.meta_lic", "restricted"},
+				{"gplContainer.meta_lic", "apacheLib.meta_lic", "restricted"},
+				{"apacheLib.meta_lic", "apacheLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -258,12 +253,11 @@
 				{"apacheContainer.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"gplLib.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice|restricted"},
+				{"apacheContainer.meta_lic", "apacheLib.meta_lic", "notice"},
+				{"apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
+				{"gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -275,9 +269,9 @@
 				{"apacheContainer.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheContainer.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"gplLib.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -289,11 +283,9 @@
 				{"apacheBin.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "apacheLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice|restricted"},
+				{"apacheBin.meta_lic", "apacheLib.meta_lic", "notice|restricted"},
+				{"apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -305,9 +297,9 @@
 				{"apacheBin.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "apacheLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheBin.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheBin.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "apacheLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -318,7 +310,7 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -329,7 +321,7 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -340,9 +332,9 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"gplBin.meta_lic", "apacheLib.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "apacheLib.meta_lic", "restricted"},
+				{"apacheLib.meta_lic", "apacheLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -353,7 +345,7 @@
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -382,8 +374,8 @@
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
-				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
+				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", "notice"},
 			},
 		},
 		{
@@ -403,7 +395,7 @@
 				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
 			},
 			expectedResolutions: []res{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
+				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -432,9 +424,8 @@
 				{"proprietary.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"proprietary.meta_lic", "proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
-				{"proprietary.meta_lic", "proprietary.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"proprietary.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"proprietary.meta_lic", "proprietary.meta_lic", "restricted|proprietary"},
+				{"proprietary.meta_lic", "gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -445,8 +436,8 @@
 				{"proprietary.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"proprietary.meta_lic", "gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"proprietary.meta_lic", "proprietary.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"proprietary.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"proprietary.meta_lic", "proprietary.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -457,7 +448,7 @@
 				{"proprietary.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"proprietary.meta_lic", "proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
+				{"proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
 			},
 		},
 		{
@@ -468,9 +459,8 @@
 				{"gplBin.meta_lic", "proprietary.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"gplBin.meta_lic", "proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
-				{"gplBin.meta_lic", "proprietary.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "proprietary.meta_lic", "restricted|proprietary"},
 			},
 		},
 		{
@@ -481,8 +471,8 @@
 				{"gplBin.meta_lic", "proprietary.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"gplBin.meta_lic", "proprietary.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "proprietary.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -493,7 +483,7 @@
 				{"gplBin.meta_lic", "proprietary.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"gplBin.meta_lic", "proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
+				{"gplBin.meta_lic", "proprietary.meta_lic", "proprietary"},
 			},
 		},
 		{
@@ -504,8 +494,8 @@
 				{"mitBin.meta_lic", "by_exception.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"mitBin.meta_lic", "mitBin.meta_lic", "mitBin.meta_lic", "notice"},
-				{"mitBin.meta_lic", "by_exception.meta_lic", "by_exception.meta_lic", "by_exception_only"},
+				{"mitBin.meta_lic", "mitBin.meta_lic", "notice"},
+				{"mitBin.meta_lic", "by_exception.meta_lic", "by_exception_only"},
 			},
 		},
 		{
@@ -525,7 +515,7 @@
 				{"mitBin.meta_lic", "by_exception.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"mitBin.meta_lic", "by_exception.meta_lic", "by_exception.meta_lic", "by_exception_only"},
+				{"mitBin.meta_lic", "by_exception.meta_lic", "by_exception_only"},
 			},
 		},
 		{
@@ -536,8 +526,8 @@
 				{"by_exception.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"by_exception.meta_lic", "by_exception.meta_lic", "by_exception.meta_lic", "by_exception_only"},
-				{"by_exception.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"by_exception.meta_lic", "by_exception.meta_lic", "by_exception_only"},
+				{"by_exception.meta_lic", "mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -557,7 +547,7 @@
 				{"by_exception.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"by_exception.meta_lic", "by_exception.meta_lic", "by_exception.meta_lic", "by_exception_only"},
+				{"by_exception.meta_lic", "by_exception.meta_lic", "by_exception_only"},
 			},
 		},
 		{
@@ -568,8 +558,8 @@
 				{"mitBin.meta_lic", "mplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"mitBin.meta_lic", "mitBin.meta_lic", "mitBin.meta_lic", "notice"},
-				{"mitBin.meta_lic", "mplLib.meta_lic", "mplLib.meta_lic", "reciprocal"},
+				{"mitBin.meta_lic", "mitBin.meta_lic", "notice"},
+				{"mitBin.meta_lic", "mplLib.meta_lic", "reciprocal"},
 			},
 		},
 		{
@@ -580,7 +570,7 @@
 				{"mitBin.meta_lic", "mplLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"mitBin.meta_lic", "mplLib.meta_lic", "mplLib.meta_lic", "reciprocal"},
+				{"mitBin.meta_lic", "mplLib.meta_lic", "reciprocal"},
 			},
 		},
 		{
@@ -591,8 +581,8 @@
 				{"mplBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"mplBin.meta_lic", "mplBin.meta_lic", "mplBin.meta_lic", "reciprocal"},
-				{"mplBin.meta_lic", "mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"mplBin.meta_lic", "mplBin.meta_lic", "reciprocal"},
+				{"mplBin.meta_lic", "mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -603,7 +593,7 @@
 				{"mplBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedResolutions: []res{
-				{"mplBin.meta_lic", "mplBin.meta_lic", "mplBin.meta_lic", "reciprocal"},
+				{"mplBin.meta_lic", "mplBin.meta_lic", "reciprocal"},
 			},
 		},
 	}
@@ -639,8 +629,8 @@
 				{"apacheBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "notice"},
+				{"apacheLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -651,8 +641,8 @@
 				{"mitBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"mitBin.meta_lic", "mitBin.meta_lic", "notice"},
-				{"mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"mitBin.meta_lic", "notice"},
+				{"mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -663,9 +653,8 @@
 				{"apacheBin.meta_lic", "lgplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "lgplLib.meta_lic", "restricted"},
-				{"lgplLib.meta_lic", "lgplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "notice"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -676,7 +665,7 @@
 				{"apacheBin.meta_lic", "lgplLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "notice"},
 			},
 		},
 		{
@@ -687,7 +676,7 @@
 				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"apacheBin.meta_lic", "notice"},
 			},
 		},
 		{
@@ -707,8 +696,8 @@
 				{"apacheBin.meta_lic", "gplWithClasspathException.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
+				{"apacheBin.meta_lic", "notice"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -728,7 +717,7 @@
 				{"dependentModule.meta_lic", "gplWithClasspathException.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"dependentModule.meta_lic", "dependentModule.meta_lic", "notice"},
+				{"dependentModule.meta_lic", "notice"},
 			},
 		},
 		{
@@ -748,9 +737,8 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"apacheLib.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "restricted_if_statically_linked"},
+				{"apacheLib.meta_lic", "notice|restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -761,8 +749,8 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "restricted_if_statically_linked"},
+				{"apacheLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -773,7 +761,7 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -784,7 +772,7 @@
 				{"lgplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"lgplBin.meta_lic", "lgplBin.meta_lic", "restricted"},
+				{"lgplBin.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 		{
@@ -795,9 +783,8 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"apacheLib.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "restricted"},
+				{"apacheLib.meta_lic", "notice|restricted"},
 			},
 		},
 		{
@@ -808,8 +795,8 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "restricted"},
+				{"apacheLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -820,11 +807,8 @@
 				{"gplContainer.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"gplContainer.meta_lic", "gplContainer.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"apacheLib.meta_lic", "gplContainer.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"apacheLib.meta_lic", "gplContainer.meta_lic", "restricted"},
+				{"gplContainer.meta_lic", "restricted"},
+				{"apacheLib.meta_lic", "notice|restricted"},
 			},
 		},
 		{
@@ -835,9 +819,8 @@
 				{"gplContainer.meta_lic", "apacheLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"gplContainer.meta_lic", "gplContainer.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "gplContainer.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "gplContainer.meta_lic", "restricted"},
+				{"gplContainer.meta_lic", "restricted"},
+				{"apacheLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -849,11 +832,9 @@
 				{"apacheContainer.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"apacheContainer.meta_lic", "apacheContainer.meta_lic", "notice"},
-				{"apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "notice|restricted"},
+				{"apacheLib.meta_lic", "notice"},
+				{"gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -865,8 +846,8 @@
 				{"apacheContainer.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"apacheContainer.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheContainer.meta_lic", "restricted"},
+				{"gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -878,11 +859,9 @@
 				{"apacheBin.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
-				{"apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "apacheLib.meta_lic", "notice"},
-				{"apacheLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "notice|restricted"},
+				{"apacheLib.meta_lic", "notice|restricted"},
+				{"gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -894,9 +873,9 @@
 				{"apacheBin.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"apacheBin.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"apacheBin.meta_lic", "restricted"},
+				{"apacheLib.meta_lic", "restricted"},
+				{"gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -907,7 +886,7 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -918,7 +897,7 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -929,8 +908,8 @@
 				{"gplBin.meta_lic", "apacheLib.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"apacheLib.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "restricted"},
+				{"apacheLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -941,7 +920,7 @@
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -970,8 +949,8 @@
 				{"gplWithClasspathException.meta_lic", "apacheBin.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
-				{"apacheBin.meta_lic", "apacheBin.meta_lic", "notice"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
+				{"apacheBin.meta_lic", "notice"},
 			},
 		},
 		{
@@ -991,7 +970,7 @@
 				{"gplWithClasspathException.meta_lic", "dependentModule.meta_lic", []string{"dynamic"}},
 			},
 			expectedActions: []act{
-				{"gplWithClasspathException.meta_lic", "gplWithClasspathException.meta_lic", "permissive"},
+				{"gplWithClasspathException.meta_lic", "permissive"},
 			},
 		},
 		{
@@ -1020,9 +999,8 @@
 				{"proprietary.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
-				{"proprietary.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"proprietary.meta_lic", "restricted|proprietary"},
+				{"gplLib.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -1033,8 +1011,8 @@
 				{"proprietary.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"gplLib.meta_lic", "gplLib.meta_lic", "restricted"},
-				{"proprietary.meta_lic", "gplLib.meta_lic", "restricted"},
+				{"gplLib.meta_lic", "restricted"},
+				{"proprietary.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -1045,7 +1023,7 @@
 				{"proprietary.meta_lic", "gplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
+				{"proprietary.meta_lic", "proprietary"},
 			},
 		},
 		{
@@ -1056,9 +1034,8 @@
 				{"gplBin.meta_lic", "proprietary.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
-				{"proprietary.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "restricted"},
+				{"proprietary.meta_lic", "restricted|proprietary"},
 			},
 		},
 		{
@@ -1069,8 +1046,8 @@
 				{"gplBin.meta_lic", "proprietary.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"gplBin.meta_lic", "gplBin.meta_lic", "restricted"},
-				{"proprietary.meta_lic", "gplBin.meta_lic", "restricted"},
+				{"gplBin.meta_lic", "restricted"},
+				{"proprietary.meta_lic", "restricted"},
 			},
 		},
 		{
@@ -1081,7 +1058,7 @@
 				{"gplBin.meta_lic", "proprietary.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"proprietary.meta_lic", "proprietary.meta_lic", "proprietary"},
+				{"proprietary.meta_lic", "proprietary"},
 			},
 		},
 		{
@@ -1092,8 +1069,8 @@
 				{"mitBin.meta_lic", "by_exception.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"mitBin.meta_lic", "mitBin.meta_lic", "notice"},
-				{"by_exception.meta_lic", "by_exception.meta_lic", "by_exception_only"},
+				{"mitBin.meta_lic", "notice"},
+				{"by_exception.meta_lic", "by_exception_only"},
 			},
 		},
 		{
@@ -1113,7 +1090,7 @@
 				{"mitBin.meta_lic", "by_exception.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"by_exception.meta_lic", "by_exception.meta_lic", "by_exception_only"},
+				{"by_exception.meta_lic", "by_exception_only"},
 			},
 		},
 		{
@@ -1124,8 +1101,8 @@
 				{"by_exception.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"by_exception.meta_lic", "by_exception.meta_lic", "by_exception_only"},
-				{"mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"by_exception.meta_lic", "by_exception_only"},
+				{"mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -1145,7 +1122,7 @@
 				{"by_exception.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"by_exception.meta_lic", "by_exception.meta_lic", "by_exception_only"},
+				{"by_exception.meta_lic", "by_exception_only"},
 			},
 		},
 		{
@@ -1156,8 +1133,8 @@
 				{"mitBin.meta_lic", "mplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"mitBin.meta_lic", "mitBin.meta_lic", "notice"},
-				{"mplLib.meta_lic", "mplLib.meta_lic", "reciprocal"},
+				{"mitBin.meta_lic", "notice"},
+				{"mplLib.meta_lic", "reciprocal"},
 			},
 		},
 		{
@@ -1168,7 +1145,7 @@
 				{"mitBin.meta_lic", "mplLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"mplLib.meta_lic", "mplLib.meta_lic", "reciprocal"},
+				{"mplLib.meta_lic", "reciprocal"},
 			},
 		},
 		{
@@ -1179,8 +1156,8 @@
 				{"mplBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"mplBin.meta_lic", "mplBin.meta_lic", "reciprocal"},
-				{"mitLib.meta_lic", "mitLib.meta_lic", "notice"},
+				{"mplBin.meta_lic", "reciprocal"},
+				{"mitLib.meta_lic", "notice"},
 			},
 		},
 		{
@@ -1191,7 +1168,25 @@
 				{"mplBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
 			},
 			expectedActions: []act{
-				{"mplBin.meta_lic", "mplBin.meta_lic", "reciprocal"},
+				{"mplBin.meta_lic", "reciprocal"},
+			},
+		},
+		{
+			name:      "regress-walk-twice",
+			condition: ImpliesShared,
+			roots:     []string{"mitBin.meta_lic", "apacheBin.meta_lic", "gplLib.meta_lic"},
+			edges: []annotated{
+				{"apacheBin.meta_lic", "mitLib.meta_lic", []string{"dynamic"}},
+				{"apacheBin.meta_lic", "gplLib.meta_lic", []string{"dynamic"}},
+				{"mitBin.meta_lic", "mitLib.meta_lic", []string{"static"}},
+				{"mitBin.meta_lic", "lgplLib.meta_lic", []string{"static"}},
+			},
+			expectedActions: []act{
+				{"apacheBin.meta_lic", "restricted"},
+				{"mitLib.meta_lic", "restricted|restricted_if_statically_linked"},
+				{"gplLib.meta_lic", "restricted"},
+				{"mitBin.meta_lic", "restricted_if_statically_linked"},
+				{"lgplLib.meta_lic", "restricted_if_statically_linked"},
 			},
 		},
 	}
diff --git a/tools/compliance/projectmetadata/projectmetadata.go b/tools/compliance/projectmetadata/projectmetadata.go
index 1861b47..b137a12 100644
--- a/tools/compliance/projectmetadata/projectmetadata.go
+++ b/tools/compliance/projectmetadata/projectmetadata.go
@@ -58,6 +58,11 @@
 	return fmt.Sprintf("project: %q\n%s", pm.project, pm.proto.String())
 }
 
+// Project returns the path to the directory containing the METADATA file
+func (pm *ProjectMetadata) Project() string {
+	return pm.project
+}
+
 // ProjectName returns the name of the project.
 func (pm *ProjectMetadata) Name() string {
 	return pm.proto.GetName()
@@ -118,6 +123,7 @@
 // a `ProjectMetadata`, pm (can be nil even without error), or a non-nil `err`.
 type projectIndex struct {
 	project string
+	path    string
 	pm      *ProjectMetadata
 	err     error
 	done    chan struct{}
@@ -230,6 +236,19 @@
 	return result, nil
 }
 
+// AllMetadataFiles returns the list of all METADATA files read thus far.
+func (ix *Index) AllMetadataFiles() []string {
+	var files []string
+	ix.projects.Range(func(key, value any) bool {
+		pi := value.(*projectIndex)
+		if pi.path != "" {
+			files = append(files, pi.path)
+		}
+		return true
+	})
+	return files
+}
+
 // readMetadataFile tries to read and parse a METADATA file at `path` for `project`.
 func (ix *Index) readMetadataFile(pi *projectIndex, path string) {
 	f, err := ix.rootFS.Open(path)
@@ -250,9 +269,24 @@
 	pm := &ProjectMetadata{project: pi.project}
 	err = uo.Unmarshal(data, &pm.proto)
 	if err != nil {
-		pi.err = fmt.Errorf("error in project %q metadata %q: %w", pi.project, path, err)
+		pi.err = fmt.Errorf(`error in project %q METADATA %q: %v
+
+METADATA and METADATA.android files must parse as text protobufs
+defined by
+   build/soong/compliance/project_metadata_proto/project_metadata.proto
+
+* unknown fields don't matter
+* check invalid ENUM names
+* check quoting
+* check unescaped nested quotes
+* check the comment marker for protobuf is '#' not '//'
+
+if importing a library that uses a different sort of METADATA file, add
+a METADATA.android file beside it to parse instead
+`, pi.project, path, err)
 		return
 	}
 
+	pi.path = path
 	pi.pm = pm
 }
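The new `AllMetadataFiles` accessor above collects paths out of a `sync.Map`-backed index. The sketch below mirrors that pattern with simplified types; it adds an explicit sort, which is an assumption of this illustration (useful because `sync.Map.Range` visits entries in no defined order) rather than something the diff itself does.
```
// Illustrative sketch; the Index and projectIndex types are simplified.
package main

import (
	"fmt"
	"sort"
	"sync"
)

type projectIndex struct {
	project string
	path    string // non-empty once a METADATA file was successfully read
}

type Index struct {
	projects sync.Map // project name -> *projectIndex
}

// AllMetadataFiles gathers every recorded METADATA path; sorting makes the
// output deterministic across runs.
func (ix *Index) AllMetadataFiles() []string {
	var files []string
	ix.projects.Range(func(_, value any) bool {
		if pi := value.(*projectIndex); pi.path != "" {
			files = append(files, pi.path)
		}
		return true
	})
	sort.Strings(files)
	return files
}

func main() {
	ix := &Index{}
	ix.projects.Store("external/foo", &projectIndex{project: "external/foo", path: "external/foo/METADATA"})
	ix.projects.Store("external/bar", &projectIndex{project: "external/bar", path: "external/bar/METADATA.android"})
	fmt.Println(ix.AllMetadataFiles())
}
```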
diff --git a/tools/compliance/readgraph.go b/tools/compliance/readgraph.go
index bf364e6..a413ebe 100644
--- a/tools/compliance/readgraph.go
+++ b/tools/compliance/readgraph.go
@@ -175,7 +175,7 @@
 		}
 		lg.edges = make(TargetEdgeList, 0, esize)
 		for _, tn := range lg.targets {
-			tn.licenseConditions = LicenseConditionSetFromNames(tn, tn.proto.LicenseConditions...)
+			tn.licenseConditions = LicenseConditionSetFromNames(tn.proto.LicenseConditions...)
 			err = addDependencies(lg, tn)
 			if err != nil {
 				return nil, fmt.Errorf("error indexing dependencies for %q: %w", tn.name, err)
diff --git a/tools/compliance/resolutionset_test.go b/tools/compliance/resolutionset_test.go
index 89cdfeb..efdff82 100644
--- a/tools/compliance/resolutionset_test.go
+++ b/tools/compliance/resolutionset_test.go
@@ -27,48 +27,44 @@
 	// binc represents a compiler or other toolchain binary used for
 	// building the other binaries.
 	bottomUp = []res{
-		{"image", "image", "image", "notice"},
-		{"image", "image", "bin2", "restricted"},
-		{"image", "bin1", "bin1", "reciprocal"},
-		{"image", "bin2", "bin2", "restricted"},
-		{"image", "lib1", "lib1", "notice"},
-		{"image", "lib2", "lib2", "notice"},
-		{"binc", "binc", "binc", "proprietary"},
-		{"bin1", "bin1", "bin1", "reciprocal"},
-		{"bin1", "lib1", "lib1", "notice"},
-		{"bin2", "bin2", "bin2", "restricted"},
-		{"bin2", "lib2", "lib2", "notice"},
-		{"lib1", "lib1", "lib1", "notice"},
-		{"lib2", "lib2", "lib2", "notice"},
+		{"image", "image", "notice|restricted"},
+		{"image", "bin1", "reciprocal"},
+		{"image", "bin2", "restricted"},
+		{"image", "lib1", "notice"},
+		{"image", "lib2", "notice"},
+		{"binc", "binc", "proprietary"},
+		{"bin1", "bin1", "reciprocal"},
+		{"bin1", "lib1", "notice"},
+		{"bin2", "bin2", "restricted"},
+		{"bin2", "lib2", "notice"},
+		{"lib1", "lib1", "notice"},
+		{"lib2", "lib2", "notice"},
 	}
 
 	// notice describes bottomUp after a top-down notice resolve.
 	notice = []res{
-		{"image", "image", "image", "notice"},
-		{"image", "image", "bin2", "restricted"},
-		{"image", "bin1", "bin1", "reciprocal"},
-		{"image", "bin2", "bin2", "restricted"},
-		{"image", "lib1", "lib1", "notice"},
-		{"image", "lib2", "bin2", "restricted"},
-		{"image", "lib2", "lib2", "notice"},
-		{"bin1", "bin1", "bin1", "reciprocal"},
-		{"bin1", "lib1", "lib1", "notice"},
-		{"bin2", "bin2", "bin2", "restricted"},
-		{"bin2", "lib2", "bin2", "restricted"},
-		{"bin2", "lib2", "lib2", "notice"},
-		{"lib1", "lib1", "lib1", "notice"},
-		{"lib2", "lib2", "lib2", "notice"},
+		{"image", "image", "notice|restricted"},
+		{"image", "bin1", "reciprocal"},
+		{"image", "bin2", "restricted"},
+		{"image", "lib1", "notice"},
+		{"image", "lib2", "notice|restricted"},
+		{"bin1", "bin1", "reciprocal"},
+		{"bin1", "lib1", "notice"},
+		{"bin2", "bin2", "restricted"},
+		{"bin2", "lib2", "notice|restricted"},
+		{"lib1", "lib1", "notice"},
+		{"lib2", "lib2", "notice"},
 	}
 
 	// share describes bottomUp after a top-down share resolve.
 	share = []res{
-		{"image", "image", "bin2", "restricted"},
-		{"image", "bin1", "bin1", "reciprocal"},
-		{"image", "bin2", "bin2", "restricted"},
-		{"image", "lib2", "bin2", "restricted"},
-		{"bin1", "bin1", "bin1", "reciprocal"},
-		{"bin2", "bin2", "bin2", "restricted"},
-		{"bin2", "lib2", "bin2", "restricted"},
+		{"image", "image", "restricted"},
+		{"image", "bin1", "reciprocal"},
+		{"image", "bin2", "restricted"},
+		{"image", "lib2", "restricted"},
+		{"bin1", "bin1", "reciprocal"},
+		{"bin2", "bin2", "restricted"},
+		{"bin2", "lib2", "restricted"},
 	}
 
 	// proprietary describes bottomUp after a top-down proprietary resolve.
diff --git a/tools/compliance/test_util.go b/tools/compliance/test_util.go
index 6c50d3e..053b398 100644
--- a/tools/compliance/test_util.go
+++ b/tools/compliance/test_util.go
@@ -57,7 +57,7 @@
 	LGPL = `` +
 		`package_name: "Free Library"
 license_kinds: "SPDX-license-identifier-LGPL-2.0"
-license_conditions: "restricted"
+license_conditions: "restricted_if_statically_linked"
 `
 
 	// MPL starts a test metadata file for a module with MPL 2.0 reciprocal licensing.
@@ -121,28 +121,24 @@
 	return tn
 }
 
-// newTestCondition constructs a test license condition in the license graph.
-func newTestCondition(lg *LicenseGraph, targetName string, conditionName string) LicenseCondition {
-	tn := newTestNode(lg, targetName)
-	cl := LicenseConditionSetFromNames(tn, conditionName).AsList()
+// newTestCondition constructs a test license condition.
+func newTestCondition(conditionName string) LicenseCondition {
+	cl := LicenseConditionSetFromNames(conditionName).AsList()
 	if len(cl) == 0 {
 		panic(fmt.Errorf("attempt to create unrecognized condition: %q", conditionName))
 	} else if len(cl) != 1 {
 		panic(fmt.Errorf("unexpected multiple conditions from condition name: %q: got %d, want 1", conditionName, len(cl)))
 	}
 	lc := cl[0]
-	tn.licenseConditions = tn.licenseConditions.Plus(lc)
 	return lc
 }
 
-// newTestConditionSet constructs a test license condition set in the license graph.
-func newTestConditionSet(lg *LicenseGraph, targetName string, conditionName []string) LicenseConditionSet {
-	tn := newTestNode(lg, targetName)
-	cs := LicenseConditionSetFromNames(tn, conditionName...)
+// newTestConditionSet constructs a test license condition set.
+func newTestConditionSet(conditionName []string) LicenseConditionSet {
+	cs := LicenseConditionSetFromNames(conditionName...)
 	if cs.IsEmpty() {
 		panic(fmt.Errorf("attempt to create unrecognized condition: %q", conditionName))
 	}
-	tn.licenseConditions = tn.licenseConditions.Union(cs)
 	return cs
 }
 
@@ -283,12 +279,12 @@
 
 // act describes test data resolution actions to define test action sets.
 type act struct {
-	actsOn, origin, condition string
+	actsOn, condition string
 }
 
 // String returns a human-readable string representing the test action.
 func (a act) String() string {
-	return fmt.Sprintf("%s{%s:%s}", a.actsOn, a.origin, a.condition)
+	return fmt.Sprintf("%s{%s}", a.actsOn, a.condition)
 }
 
 // toActionSet converts a list of act test data into a test action set.
@@ -296,7 +292,7 @@
 	as := make(ActionSet)
 	for _, a := range data {
 		actsOn := newTestNode(lg, a.actsOn)
-		cs := newTestConditionSet(lg, a.origin, strings.Split(a.condition, "|"))
+		cs := newTestConditionSet(strings.Split(a.condition, "|"))
 		as[actsOn] = cs
 	}
 	return as
@@ -304,7 +300,7 @@
 
 // res describes test data resolutions to define test resolution sets.
 type res struct {
-	attachesTo, actsOn, origin, condition string
+	attachesTo, actsOn, condition string
 }
 
 // toResolutionSet converts a list of res test data into a test resolution set.
@@ -316,7 +312,7 @@
 		if _, ok := rmap[attachesTo]; !ok {
 			rmap[attachesTo] = make(ActionSet)
 		}
-		cs := newTestConditionSet(lg, r.origin, strings.Split(r.condition, ":"))
+		cs := newTestConditionSet(strings.Split(r.condition, "|"))
 		rmap[attachesTo][actsOn] |= cs
 	}
 	return rmap
@@ -416,15 +412,13 @@
 	result := make([]SourceSharePrivacyConflict, 0, len(data))
 	for _, c := range data {
 		fields := strings.Split(c.share, ":")
-		oshare := fields[0]
 		cshare := fields[1]
 		fields = strings.Split(c.privacy, ":")
-		oprivacy := fields[0]
 		cprivacy := fields[1]
 		result = append(result, SourceSharePrivacyConflict{
 			newTestNode(lg, c.sourceNode),
-			newTestCondition(lg, oshare, cshare),
-			newTestCondition(lg, oprivacy, cprivacy),
+			newTestCondition(cshare),
+			newTestCondition(cprivacy),
 		})
 	}
 	return result
diff --git a/tools/event_log_tags.bzl b/tools/event_log_tags.bzl
deleted file mode 100644
index 35305ae..0000000
--- a/tools/event_log_tags.bzl
+++ /dev/null
@@ -1,35 +0,0 @@
-"""Event log tags generation rule"""
-
-load("@bazel_skylib//lib:paths.bzl", "paths")
-
-def _event_log_tags_impl(ctx):
-    out_files = []
-    for logtag_file in ctx.files.srcs:
-        out_filename = paths.replace_extension(logtag_file.basename, ".java")
-        out_file = ctx.actions.declare_file(out_filename)
-        out_files.append(out_file)
-        ctx.actions.run(
-            inputs = [logtag_file],
-            outputs = [out_file],
-            arguments = [
-                "-o",
-                out_file.path,
-                logtag_file.path,
-            ],
-            progress_message = "Generating Java logtag file from %s" % logtag_file.short_path,
-            executable = ctx.executable._logtag_to_java_tool,
-        )
-    return [DefaultInfo(files = depset(out_files))]
-
-event_log_tags = rule(
-    implementation = _event_log_tags_impl,
-    attrs = {
-        "srcs": attr.label_list(allow_files = [".logtags"], mandatory = True),
-        "_logtag_to_java_tool": attr.label(
-            executable = True,
-            cfg = "exec",
-            allow_files = True,
-            default = Label("//build/make/tools:java-event-log-tags"),
-        ),
-    },
-)
diff --git a/tools/finalization/OWNERS b/tools/finalization/OWNERS
new file mode 100644
index 0000000..518b60d
--- /dev/null
+++ b/tools/finalization/OWNERS
@@ -0,0 +1,5 @@
+include platform/build/soong:/OWNERS
+smoreland@google.com
+alexbuy@google.com
+patb@google.com
+zyy@google.com
diff --git a/tools/finalization/README.md b/tools/finalization/README.md
new file mode 100644
index 0000000..501f260
--- /dev/null
+++ b/tools/finalization/README.md
@@ -0,0 +1,22 @@
+# Finalization tools
+This folder contains automation and CI scripts for [finalizing](https://go/android-finalization) Android before release.
+
+## Automation:
+1. [Environment setup](./environment.sh). Sets values for various finalization constants.
+2. [Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh). Prepares the branch for the SDK release. The SDK contains the Android Java APIs and other stable APIs. Commonly referred to as the 1st step.
+3. [Finalize Android](./finalize-sdk-rel.sh). Marks the branch as "REL", i.e. prepares it for the Android release. Any signed build containing these changes will be considered an official Android release. Referred to as the 2nd finalization step.
+4. [Finalize SDK and submit](./step-1.sh). Performs the [Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) step, creates CLs, organizes them into a topic and sends them to Gerrit.
+  a. [Update SDK and submit](./update-step-1.sh). Same as above, but updates the existing CLs.
+5. [Finalize Android and submit](./step-2.sh). Performs the [Finalize Android](./finalize-sdk-rel.sh) step, creates CLs, organizes them into a topic and sends them to Gerrit.
+  a. [Update Android and submit](./update-step-2.sh). Same as above, but updates the existing CLs.
+
+## CI:
+Performed as build targets in the finalization branches.
+1. [Finalization Step 1 for Main, git_main-fina-1-release](https://android-build.googleplex.com/builds/branches/git_main-fina-1-release/grid). Tests the [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh).
+2. [Finalization Step 1 for UDC, git_udc-fina-1-release](https://android-build.googleplex.com/builds/branches/git_udc-fina-1-release/grid). Same, but for udc-dev.
+3. [Finalization Step 2 for Main, git_main-fina-2-release](https://android-build.googleplex.com/builds/branches/git_main-fina-2-release/grid). Tests the [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) and the [2nd step/Finalize Android](./finalize-sdk-rel.sh). Uses [local finalization](./localonly-steps.sh) to build and copy presubmit artifacts.
+4. [Finalization Step 2 for UDC, git_udc-fina-2-release](https://android-build.googleplex.com/builds/branches/git_udc-fina-2-release/grid). Same, but for udc-dev.
+5. [Local finalization steps](./localonly-steps.sh) are done only during local testing or in the CI lab. Normally these steps use artifacts from other builds.
+
+## Utility:
+[Full cleanup](./cleanup.sh). Removes all local changes and switches each project into a detached-HEAD state. This is the best state from which to sync/rebase/finalize the branch. A usage sketch follows below.
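+
+## Example:
+A minimal local usage sketch, shown for illustration only; it assumes a fully synced checkout and simply chains the helper scripts in this directory:
+```
+# Run finalization step 1 locally and a verification build.
+./build/make/tools/finalization/build-step-1-and-m.sh
+
+# Or run both finalization steps (SDK and REL) locally.
+./build/make/tools/finalization/build-step-1-and-2.sh
+
+# Revert all local finalization changes afterwards.
+./build/make/tools/finalization/cleanup.sh
+```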
diff --git a/tools/finalization/build-step-1-and-2.sh b/tools/finalization/build-step-1-and-2.sh
new file mode 100755
index 0000000..84e2782
--- /dev/null
+++ b/tools/finalization/build-step-1-and-2.sh
@@ -0,0 +1,24 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_main_step12() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    if [ "$FINAL_STATE" = "unfinalized" ] ; then
+        # SDK codename -> int
+        source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+    fi;
+
+    if [ "$FINAL_STATE" = "unfinalized" ] || [ "$FINAL_STATE" = "sdk" ] ; then
+        # ADB, Platform/Mainline SDKs build and move to prebuilts
+        source $top/build/make/tools/finalization/localonly-steps.sh
+
+        # REL
+        source $top/build/make/tools/finalization/finalize-sdk-rel.sh
+    fi;
+}
+
+finalize_main_step12
+
diff --git a/tools/finalization/build-step-1-and-m.sh b/tools/finalization/build-step-1-and-m.sh
new file mode 100755
index 0000000..0e7129f
--- /dev/null
+++ b/tools/finalization/build-step-1-and-m.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_main_step1_and_m() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/build-step-1.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # This command tests:
+    #   The release state for AIDL.
+    #   ABI difference between user and userdebug builds.
+    #   Resource/SDK finalization.
+    AIDL_FROZEN_REL=true $m
+}
+
+finalize_main_step1_and_m
+
diff --git a/tools/finalization/build-step-1.sh b/tools/finalization/build-step-1.sh
new file mode 100755
index 0000000..3c618fe
--- /dev/null
+++ b/tools/finalization/build-step-1.sh
@@ -0,0 +1,17 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_main_step1() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    if [ "$FINAL_STATE" = "unfinalized" ] ; then
+        # Build finalization artifacts.
+        # source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+        echo "Build finalization artifacts."
+    fi;
+}
+
+finalize_main_step1
+
diff --git a/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff b/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
new file mode 100644
index 0000000..9ced2a9
--- /dev/null
+++ b/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
@@ -0,0 +1,30 @@
+From 12eea1512f2612f41b5cf7004ee2e6a189d548d7 Mon Sep 17 00:00:00 2001
+From: Alex Buynytskyy <alexbuy@google.com>
+Date: Thu, 01 Sep 2022 10:44:21 -0700
+Subject: [PATCH] Hacky workaround for half-finalized builds.
+
+Metalava increments the SDK level by one when it's not "REL", so we
+temporarily force the build to be "REL" while we're still in the
+process of finalizing it.
+
+This CL must be reverted as part of actually declaring "REL".
+
+Bug: none
+Test: Build
+Change-Id: I8c24c6dabec0270bc384d8465c582a4ddbe8bd6c
+---
+
+diff --git a/java/droidstubs.go b/java/droidstubs.go
+index 5777b18..ec4a0f4 100644
+--- a/java/droidstubs.go
++++ b/java/droidstubs.go
+@@ -386,7 +386,8 @@
+ 	}
+ 	if apiVersions != nil {
+ 		cmd.FlagWithArg("--current-version ", ctx.Config().PlatformSdkVersion().String())
+-		cmd.FlagWithArg("--current-codename ", ctx.Config().PlatformSdkCodename())
++		// STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD
++		// cmd.FlagWithArg("--current-codename ", ctx.Config().PlatformSdkCodename())
+ 		cmd.FlagWithInput("--apply-api-levels ", apiVersions)
+ 	}
+ }
diff --git a/tools/finalization/build_soong_java_droidstubs.go.revert_hack.diff b/tools/finalization/build_soong_java_droidstubs.go.revert_hack.diff
new file mode 100644
index 0000000..7dec97c
--- /dev/null
+++ b/tools/finalization/build_soong_java_droidstubs.go.revert_hack.diff
@@ -0,0 +1,26 @@
+From c0f6e8fe4c3b6803be97aeea6683631d616412f4 Mon Sep 17 00:00:00 2001
+From: Alex Buynytskyy <alexbuy@google.com>
+Date: Thu, 08 Dec 2022 17:52:52 +0000
+Subject: [PATCH] Revert "Hacky workaround for half-finalized builds."
+
+This reverts commit 12eea1512f2612f41b5cf7004ee2e6a189d548d7.
+
+Reason for revert: finalization-2
+
+Change-Id: Ifc801271628808693b1cb20206f8f81c9a6c694d
+---
+
+diff --git a/java/droidstubs.go b/java/droidstubs.go
+index ec4a0f4..5777b18 100644
+--- a/java/droidstubs.go
++++ b/java/droidstubs.go
+@@ -386,8 +386,7 @@
+ 	}
+ 	if apiVersions != nil {
+ 		cmd.FlagWithArg("--current-version ", ctx.Config().PlatformSdkVersion().String())
+-		// STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD
+-		// cmd.FlagWithArg("--current-codename ", ctx.Config().PlatformSdkCodename())
++		cmd.FlagWithArg("--current-codename ", ctx.Config().PlatformSdkCodename())
+ 		cmd.FlagWithInput("--apply-api-levels ", apiVersions)
+ 	}
+ }
diff --git a/tools/finalization/cleanup.sh b/tools/finalization/cleanup.sh
new file mode 100755
index 0000000..cd87b1d
--- /dev/null
+++ b/tools/finalization/cleanup.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+# Brings local repository to a remote head state.
+
+# set -ex
+
+function finalize_revert_local_changes_main() {
+    local top="$(dirname "$0")"/../../../..
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # remove the out folder
+    $m clobber
+
+    repo selfupdate
+
+    repo forall -c '\
+        git checkout . ; git revert --abort ; git clean -fdx ;\
+        git checkout @ ; git branch fina-step1 -D ; git reset --hard; \
+        repo start fina-step1 ; git checkout @ ; git b fina-step1 -D ;'
+}
+
+finalize_revert_local_changes_main
diff --git a/tools/finalization/environment.sh b/tools/finalization/environment.sh
new file mode 100755
index 0000000..8c838aa
--- /dev/null
+++ b/tools/finalization/environment.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+set -ex
+
+export FINAL_BUG_ID='0'
+
+export FINAL_PLATFORM_CODENAME='UpsideDownCake'
+export CURRENT_PLATFORM_CODENAME='UpsideDownCake'
+export FINAL_PLATFORM_CODENAME_JAVA='UPSIDE_DOWN_CAKE'
+export FINAL_PLATFORM_SDK_VERSION='34'
+export FINAL_PLATFORM_VERSION='14'
+
+export FINAL_BUILD_PREFIX='UP1A'
+
+export FINAL_MAINLINE_EXTENSION='7'
+
+# Options:
+# 'unfinalized' - branch is in development state,
+# 'sdk' - SDK/API is finalized
+# 'rel' - branch is finalized, switched to REL
+export FINAL_STATE='unfinalized'
diff --git a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
new file mode 100755
index 0000000..cdc2e3a
--- /dev/null
+++ b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
@@ -0,0 +1,148 @@
+#!/bin/bash
+
+set -ex
+
+function apply_droidstubs_hack() {
+    if ! grep -q 'STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD' "$top/build/soong/java/droidstubs.go" ; then
+        git -C "$top/build/soong" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
+    fi
+}
+
+function finalize_bionic_ndk() {
+    # Adding __ANDROID_API_<>__.
+    # If this hasn't been done yet, it's not used and not really needed. Still, check for it and add it.
+    local api_level="$top/bionic/libc/include/android/api-level.h"
+    if ! grep -q "\__.*$((${FINAL_PLATFORM_SDK_VERSION}))" $api_level ; then
+        local tmpfile=$(mktemp /tmp/finalization.XXXXXX)
+        echo "
+/** Names the \"${FINAL_PLATFORM_CODENAME:0:1}\" API level ($FINAL_PLATFORM_SDK_VERSION), for comparison against \`__ANDROID_API__\`. */
+#define __ANDROID_API_${FINAL_PLATFORM_CODENAME:0:1}__ $FINAL_PLATFORM_SDK_VERSION" > "$tmpfile"
+
+        local api_level="$top/bionic/libc/include/android/api-level.h"
+        sed -i -e "/__.*$((${FINAL_PLATFORM_SDK_VERSION}-1))/r""$tmpfile" $api_level
+
+        rm "$tmpfile"
+    fi
+}
+
+function finalize_modules_utils() {
+    local shortCodename="${FINAL_PLATFORM_CODENAME:0:1}"
+    local methodPlaceholder="INSERT_NEW_AT_LEAST_${shortCodename}_METHOD_HERE"
+
+    local tmpfile=$(mktemp /tmp/finalization.XXXXXX)
+    echo "    /** Checks if the device is running on a release version of Android $FINAL_PLATFORM_CODENAME or newer */
+    @ChecksSdkIntAtLeast(api = $FINAL_PLATFORM_SDK_VERSION /* BUILD_VERSION_CODES.$FINAL_PLATFORM_CODENAME */)
+    public static boolean isAtLeast${FINAL_PLATFORM_CODENAME:0:1}() {
+        return SDK_INT >= $FINAL_PLATFORM_SDK_VERSION;
+    }" > "$tmpfile"
+
+    local javaFuncRegex='\/\*\*[^{]*isAtLeast'"${shortCodename}"'() {[^{}]*}'
+    local javaFuncReplace="N;N;N;N;N;N;N;N; s/$javaFuncRegex/$methodPlaceholder/; /$javaFuncRegex/!{P;D};"
+
+    local javaSdkLevel="$top/frameworks/libs/modules-utils/java/com/android/modules/utils/build/SdkLevel.java"
+    sed -i "$javaFuncReplace" $javaSdkLevel
+
+    sed -i "/${methodPlaceholder}"'/{
+           r '"$tmpfile"'
+           d}' $javaSdkLevel
+
+    echo "// Checks if the device is running on release version of Android ${FINAL_PLATFORM_CODENAME:0:1} or newer.
+inline bool IsAtLeast${FINAL_PLATFORM_CODENAME:0:1}() { return android_get_device_api_level() >= $FINAL_PLATFORM_SDK_VERSION; }" > "$tmpfile"
+
+    local cppFuncRegex='\/\/[^{]*IsAtLeast'"${shortCodename}"'() {[^{}]*}'
+    local cppFuncReplace="N;N;N;N;N;N; s/$cppFuncRegex/$methodPlaceholder/; /$cppFuncRegex/!{P;D};"
+
+    local cppSdkLevel="$top/frameworks/libs/modules-utils/build/include/android-modules-utils/sdk_level.h"
+    sed -i "$cppFuncReplace" $cppSdkLevel
+    sed -i "/${methodPlaceholder}"'/{
+           r '"$tmpfile"'
+           d}' $cppSdkLevel
+
+    rm "$tmpfile"
+}
+
+function finalize_aidl_vndk_sdk_resources() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local SDK_CODENAME="public static final int $FINAL_PLATFORM_CODENAME_JAVA = CUR_DEVELOPMENT;"
+    local SDK_VERSION="public static final int $FINAL_PLATFORM_CODENAME_JAVA = $FINAL_PLATFORM_SDK_VERSION;"
+
+    # default target to modify tree and build SDK
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug DIST_DIR=out/dist"
+
+    # The full process can be found at (INTERNAL) go/android-sdk-finalization.
+
+    # apply droidstubs hack to prevent tools from incrementing an API version
+    apply_droidstubs_hack
+
+    # bionic/NDK
+    finalize_bionic_ndk
+
+    # VNDK definitions for new SDK version
+    cp "$top/development/vndk/tools/definition-tool/datasets/vndk-lib-extra-list-current.txt" \
+       "$top/development/vndk/tools/definition-tool/datasets/vndk-lib-extra-list-$FINAL_PLATFORM_SDK_VERSION.txt"
+
+    AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api create_reference_dumps
+
+    # Generate ABI dumps
+    ANDROID_BUILD_TOP="$top" \
+        out/host/linux-x86/bin/create_reference_dumps \
+        -p aosp_arm64 --build-variant user
+
+    echo "NOTE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF (until 'DONE')"
+    # Update new versions of files. See update-vndk-list.sh (which requires envsetup.sh)
+    $m check-vndk-list || \
+        { cp $top/out/soong/vndk/vndk.libraries.txt $top/build/make/target/product/gsi/current.txt; }
+    echo "DONE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF"
+
+    # Finalize SDK
+
+    # frameworks/libs/modules-utils
+    finalize_modules_utils
+
+    # build/make
+    local version_defaults="$top/build/make/core/version_defaults.mk"
+    sed -i -e "s/PLATFORM_SDK_VERSION := .*/PLATFORM_SDK_VERSION := ${FINAL_PLATFORM_SDK_VERSION}/g" $version_defaults
+    sed -i -e "s/PLATFORM_VERSION_LAST_STABLE := .*/PLATFORM_VERSION_LAST_STABLE := ${FINAL_PLATFORM_VERSION}/g" $version_defaults
+    sed -i -e "s/sepolicy_major_vers := .*/sepolicy_major_vers := ${FINAL_PLATFORM_SDK_VERSION}/g" "$top/build/make/core/config.mk"
+    cp "$top/build/make/target/product/gsi/current.txt" "$top/build/make/target/product/gsi/$FINAL_PLATFORM_SDK_VERSION.txt"
+
+    # build/soong
+    sed -i -e "/:.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\\t\t\t\"${FINAL_PLATFORM_CODENAME}\":     ${FINAL_PLATFORM_SDK_VERSION}," "$top/build/soong/android/api_levels.go"
+
+    # cts
+    echo ${FINAL_PLATFORM_VERSION} > "$top/cts/tests/tests/os/assets/platform_releases.txt"
+    sed -i -e "s/EXPECTED_SDK = $((${FINAL_PLATFORM_SDK_VERSION}-1))/EXPECTED_SDK = ${FINAL_PLATFORM_SDK_VERSION}/g" "$top/cts/tests/tests/os/src/android/os/cts/BuildVersionTest.java"
+
+    # libcore
+    sed -i "s%$SDK_CODENAME%$SDK_VERSION%g" "$top/libcore/dalvik/src/main/java/dalvik/annotation/compat/VersionCodes.java"
+
+    # platform_testing
+    local version_codes="$top/platform_testing/libraries/compatibility-common-util/src/com/android/compatibility/common/util/VersionCodes.java"
+    sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1));/a \\    ${SDK_VERSION}" $version_codes
+
+    # Finalize resources
+    "$top/frameworks/base/tools/aapt2/tools/finalize_res.py" \
+           "$top/frameworks/base/core/res/res/values/public-staging.xml" \
+           "$top/frameworks/base/core/res/res/values/public-final.xml"
+
+    # frameworks/base
+    sed -i "s%$SDK_CODENAME%$SDK_VERSION%g" "$top/frameworks/base/core/java/android/os/Build.java"
+    sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\    SDK_${FINAL_PLATFORM_CODENAME_JAVA} = ${FINAL_PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt/SdkConstants.h"
+    sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\  SDK_${FINAL_PLATFORM_CODENAME_JAVA} = ${FINAL_PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt2/SdkConstants.h"
+
+    # Bump Mainline SDK extension version.
+    set +e
+    "$top/packages/modules/SdkExtensions/gen_sdk/bump_sdk.sh" ${FINAL_MAINLINE_EXTENSION}
+    set -e
+    local version_defaults="$top/build/make/core/version_defaults.mk"
+    sed -i -e "s/PLATFORM_SDK_EXTENSION_VERSION := .*/PLATFORM_SDK_EXTENSION_VERSION := ${FINAL_MAINLINE_EXTENSION}/g" $version_defaults
+
+    # Force update current.txt
+    $m clobber
+    $m update-api
+}
+
+finalize_aidl_vndk_sdk_resources
+
diff --git a/tools/finalization/finalize-sdk-rel.sh b/tools/finalization/finalize-sdk-rel.sh
new file mode 100755
index 0000000..cbee005
--- /dev/null
+++ b/tools/finalization/finalize-sdk-rel.sh
@@ -0,0 +1,55 @@
+#!/bin/bash
+
+set -ex
+
+function revert_droidstubs_hack() {
+    if grep -q 'STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD' "$top/build/soong/java/droidstubs.go" ; then
+        git -C "$top/build/soong" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.revert_hack.diff
+    fi
+}
+
+function apply_prerelease_sdk_hack() {
+    if ! grep -q 'STOPSHIP: hack for the pre-release SDK' "$top/frameworks/base/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java" ; then
+        git -C "$top/frameworks/base" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.apply_hack.diff
+    fi
+}
+
+function finalize_sdk_rel() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    # revert the droidstubs hack now that we are switching to REL
+    revert_droidstubs_hack
+
+    # let the apps built with pre-release SDK parse
+    apply_prerelease_sdk_hack
+
+    # build/make/core/version_defaults.mk
+    sed -i -e "s/PLATFORM_VERSION_CODENAME.${FINAL_BUILD_PREFIX} := .*/PLATFORM_VERSION_CODENAME.${FINAL_BUILD_PREFIX} := REL/g" "$top/build/make/core/version_defaults.mk"
+
+    # cts
+    echo "$FINAL_PLATFORM_VERSION" > "$top/cts/tests/tests/os/assets/platform_versions.txt"
+    if [ "$FINAL_PLATFORM_CODENAME" != "$CURRENT_PLATFORM_CODENAME" ]; then
+        echo "$CURRENT_PLATFORM_CODENAME" >> "./cts/tests/tests/os/assets/platform_versions.txt"
+    fi
+    git -C "$top/cts" mv hostsidetests/theme/assets/${FINAL_PLATFORM_CODENAME} hostsidetests/theme/assets/${FINAL_PLATFORM_SDK_VERSION}
+
+    # system/sepolicy
+    mkdir -p "$top/system/sepolicy/prebuilts/api/${FINAL_PLATFORM_SDK_VERSION}.0/"
+    cp -r "$top/system/sepolicy/public/" "$top/system/sepolicy/prebuilts/api/${FINAL_PLATFORM_SDK_VERSION}.0/"
+    cp -r "$top/system/sepolicy/private/" "$top/system/sepolicy/prebuilts/api/${FINAL_PLATFORM_SDK_VERSION}.0/"
+
+    # prebuilts/abi-dumps/ndk
+    mkdir -p "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION"
+    cp -r "$top/prebuilts/abi-dumps/ndk/current/64/" "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION/"
+
+    # prebuilts/abi-dumps/vndk
+    mv "$top/prebuilts/abi-dumps/vndk/$CURRENT_PLATFORM_CODENAME" "$top/prebuilts/abi-dumps/vndk/$FINAL_PLATFORM_SDK_VERSION"
+
+    # prebuilts/abi-dumps/platform
+    mkdir -p "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION"
+    cp -r "$top/prebuilts/abi-dumps/platform/current/64/" "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION/"
+}
+
+finalize_sdk_rel
+
diff --git a/tools/finalization/frameworks_base.apply_hack.diff b/tools/finalization/frameworks_base.apply_hack.diff
new file mode 100644
index 0000000..545c230
--- /dev/null
+++ b/tools/finalization/frameworks_base.apply_hack.diff
@@ -0,0 +1,129 @@
+From 3c9a5321dc94124367f2f4363d85a8f488f5d4d1 Mon Sep 17 00:00:00 2001
+From: Yurii Zubrytskyi <zyy@google.com>
+Date: Wed, 04 May 2022 01:05:24 -0700
+Subject: [PATCH] HACK: allow apps with pre-release SDK RESTRICT AUTOMERGE
+
+Revert before releasing
+Let the apps built with pre-release Tiramisu SDK parse
++ fix a test that didn't expect REL builds to throw
+  when checking for lettered versions
+
+Test: build
+Bug: 225745567
+Bug: 231407096
+Change-Id: Ia0de2ab1a99e5f186f0d871e6225d88bf3308df6
+---
+
+diff --git a/core/java/android/content/pm/PackageParser.java b/core/java/android/content/pm/PackageParser.java
+index c15b3e0..3f4df4d 100644
+--- a/core/java/android/content/pm/PackageParser.java
++++ b/core/java/android/content/pm/PackageParser.java
+@@ -2628,6 +2628,15 @@
+             return Build.VERSION_CODES.CUR_DEVELOPMENT;
+         }
+ 
++        // STOPSHIP: hack for the pre-release SDK
++        if (platformSdkCodenames.length == 0
++                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
++                targetCode)) {
++            Slog.w(TAG, "Package requires development platform " + targetCode
++                    + ", returning current version " + Build.VERSION.SDK_INT);
++            return Build.VERSION.SDK_INT;
++        }
++
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             outError[0] = "Requires development platform " + targetCode
+@@ -2699,6 +2708,15 @@
+             return Build.VERSION_CODES.CUR_DEVELOPMENT;
+         }
+ 
++        // STOPSHIP: hack for the pre-release SDK
++        if (platformSdkCodenames.length == 0
++                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
++                minCode)) {
++            Slog.w(TAG, "Package requires min development platform " + minCode
++                    + ", returning current version " + Build.VERSION.SDK_INT);
++            return Build.VERSION.SDK_INT;
++        }
++
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             outError[0] = "Requires development platform " + minCode
+diff --git a/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java b/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
+index 3e1c5bb..8cc4cdb 100644
+--- a/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
++++ b/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
+@@ -316,6 +316,15 @@
+             return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+         }
+ 
++        // STOPSHIP: hack for the pre-release SDK
++        if (platformSdkCodenames.length == 0
++                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
++                        minCode)) {
++            Slog.w(TAG, "Parsed package requires min development platform " + minCode
++                    + ", returning current version " + Build.VERSION.SDK_INT);
++            return input.success(Build.VERSION.SDK_INT);
++        }
++
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK,
+@@ -368,19 +377,27 @@
+             return input.success(targetVers);
+         }
+ 
++        // If it's a pre-release SDK and the codename matches this platform, it
++        // definitely targets this SDK.
++        if (matchTargetCode(platformSdkCodenames, targetCode)) {
++            return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
++        }
++
++        // STOPSHIP: hack for the pre-release SDK
++        if (platformSdkCodenames.length == 0
++                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
++                        targetCode)) {
++            Slog.w(TAG, "Parsed package requires development platform " + targetCode
++                    + ", returning current version " + Build.VERSION.SDK_INT);
++            return input.success(Build.VERSION.SDK_INT);
++        }
++
+         try {
+             if (allowUnknownCodenames && UnboundedSdkLevel.isAtMost(targetCode)) {
+                 return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+             }
+         } catch (IllegalArgumentException e) {
+-            // isAtMost() throws it when encountering an older SDK codename
+-            return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK, e.getMessage());
+-        }
+-
+-        // If it's a pre-release SDK and the codename matches this platform, it
+-        // definitely targets this SDK.
+-        if (matchTargetCode(platformSdkCodenames, targetCode)) {
+-            return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
++            return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK, "Bad package SDK");
+         }
+ 
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+diff --git a/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java b/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
+index 92c7871..687e8f7 100644
+--- a/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
++++ b/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
+@@ -446,14 +446,14 @@
+                         + "    <library \n"
+                         + "        name=\"foo\"\n"
+                         + "        file=\"" + mFooJar + "\"\n"
+-                        + "        on-bootclasspath-before=\"Q\"\n"
++                        + "        on-bootclasspath-before=\"A\"\n"
+                         + "        on-bootclasspath-since=\"W\"\n"
+                         + "     />\n\n"
+                         + " </permissions>";
+         parseSharedLibraries(contents);
+         assertFooIsOnlySharedLibrary();
+         SystemConfig.SharedLibraryEntry entry = mSysConfig.getSharedLibraries().get("foo");
+-        assertThat(entry.onBootclasspathBefore).isEqualTo("Q");
++        assertThat(entry.onBootclasspathBefore).isEqualTo("A");
+         assertThat(entry.onBootclasspathSince).isEqualTo("W");
+     }
+ 
diff --git a/tools/finalization/frameworks_base.revert_hack.diff b/tools/finalization/frameworks_base.revert_hack.diff
new file mode 100644
index 0000000..1d147b1
--- /dev/null
+++ b/tools/finalization/frameworks_base.revert_hack.diff
@@ -0,0 +1,125 @@
+From b4ae5c71f327d00081bbb0b7b26d48eb88761fbc Mon Sep 17 00:00:00 2001
+From: Alex Buynytskyy <alexbuy@google.com>
+Date: Tue, 21 Feb 2023 01:43:14 +0000
+Subject: [PATCH] Revert "HACK: allow apps with pre-release SDK RESTRICT AUTOMERGE"
+
+This reverts commit 3c9a5321dc94124367f2f4363d85a8f488f5d4d1.
+
+Reason for revert: not needed anymore
+
+Change-Id: I5c5e3af78a41e7bd8cbc99464dccc57c345105f3
+---
+
+diff --git a/core/java/android/content/pm/PackageParser.java b/core/java/android/content/pm/PackageParser.java
+index 3f4df4d..c15b3e0 100644
+--- a/core/java/android/content/pm/PackageParser.java
++++ b/core/java/android/content/pm/PackageParser.java
+@@ -2628,15 +2628,6 @@
+             return Build.VERSION_CODES.CUR_DEVELOPMENT;
+         }
+ 
+-        // STOPSHIP: hack for the pre-release SDK
+-        if (platformSdkCodenames.length == 0
+-                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
+-                targetCode)) {
+-            Slog.w(TAG, "Package requires development platform " + targetCode
+-                    + ", returning current version " + Build.VERSION.SDK_INT);
+-            return Build.VERSION.SDK_INT;
+-        }
+-
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             outError[0] = "Requires development platform " + targetCode
+@@ -2708,15 +2699,6 @@
+             return Build.VERSION_CODES.CUR_DEVELOPMENT;
+         }
+ 
+-        // STOPSHIP: hack for the pre-release SDK
+-        if (platformSdkCodenames.length == 0
+-                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
+-                minCode)) {
+-            Slog.w(TAG, "Package requires min development platform " + minCode
+-                    + ", returning current version " + Build.VERSION.SDK_INT);
+-            return Build.VERSION.SDK_INT;
+-        }
+-
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             outError[0] = "Requires development platform " + minCode
+diff --git a/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java b/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
+index 8cc4cdb..3e1c5bb 100644
+--- a/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
++++ b/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
+@@ -316,15 +316,6 @@
+             return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+         }
+ 
+-        // STOPSHIP: hack for the pre-release SDK
+-        if (platformSdkCodenames.length == 0
+-                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
+-                        minCode)) {
+-            Slog.w(TAG, "Parsed package requires min development platform " + minCode
+-                    + ", returning current version " + Build.VERSION.SDK_INT);
+-            return input.success(Build.VERSION.SDK_INT);
+-        }
+-
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK,
+@@ -377,27 +368,19 @@
+             return input.success(targetVers);
+         }
+ 
+-        // If it's a pre-release SDK and the codename matches this platform, it
+-        // definitely targets this SDK.
+-        if (matchTargetCode(platformSdkCodenames, targetCode)) {
+-            return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+-        }
+-
+-        // STOPSHIP: hack for the pre-release SDK
+-        if (platformSdkCodenames.length == 0
+-                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
+-                        targetCode)) {
+-            Slog.w(TAG, "Parsed package requires development platform " + targetCode
+-                    + ", returning current version " + Build.VERSION.SDK_INT);
+-            return input.success(Build.VERSION.SDK_INT);
+-        }
+-
+         try {
+             if (allowUnknownCodenames && UnboundedSdkLevel.isAtMost(targetCode)) {
+                 return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+             }
+         } catch (IllegalArgumentException e) {
+-            return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK, "Bad package SDK");
++            // isAtMost() throws it when encountering an older SDK codename
++            return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK, e.getMessage());
++        }
++
++        // If it's a pre-release SDK and the codename matches this platform, it
++        // definitely targets this SDK.
++        if (matchTargetCode(platformSdkCodenames, targetCode)) {
++            return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+         }
+ 
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+diff --git a/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java b/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
+index 687e8f7..92c7871 100644
+--- a/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
++++ b/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
+@@ -446,14 +446,14 @@
+                         + "    <library \n"
+                         + "        name=\"foo\"\n"
+                         + "        file=\"" + mFooJar + "\"\n"
+-                        + "        on-bootclasspath-before=\"A\"\n"
++                        + "        on-bootclasspath-before=\"Q\"\n"
+                         + "        on-bootclasspath-since=\"W\"\n"
+                         + "     />\n\n"
+                         + " </permissions>";
+         parseSharedLibraries(contents);
+         assertFooIsOnlySharedLibrary();
+         SystemConfig.SharedLibraryEntry entry = mSysConfig.getSharedLibraries().get("foo");
+-        assertThat(entry.onBootclasspathBefore).isEqualTo("A");
++        assertThat(entry.onBootclasspathBefore).isEqualTo("Q");
+         assertThat(entry.onBootclasspathSince).isEqualTo("W");
+     }
+ 
diff --git a/tools/finalization/localonly-steps.sh b/tools/finalization/localonly-steps.sh
new file mode 100755
index 0000000..6107b3e
--- /dev/null
+++ b/tools/finalization/localonly-steps.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_locally() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    # default target to modify tree and build SDK
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug DIST_DIR=out/dist"
+
+    # adb keys
+    $m adb
+    LOGNAME=android-eng HOSTNAME=google.com "$top/out/host/linux-x86/bin/adb" keygen "$top/vendor/google/security/adb/${FINAL_PLATFORM_VERSION}.adb_key"
+
+    # Build Platform SDKs.
+    $top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=sdk TARGET_BUILD_VARIANT=userdebug sdk dist sdk_repo DIST_DIR=out/dist
+
+    # Build Modules SDKs.
+    TARGET_BUILD_VARIANT=userdebug UNBUNDLED_BUILD_SDKS_FROM_SOURCE=true DIST_DIR=out/dist "$top/vendor/google/build/mainline_modules_sdks.sh"
+
+    # Update prebuilts.
+    "$top/prebuilts/build-tools/path/linux-x86/python3" -W ignore::DeprecationWarning "$top/prebuilts/sdk/update_prebuilts.py" --local_mode -f ${FINAL_PLATFORM_SDK_VERSION} -e ${FINAL_MAINLINE_EXTENSION} --bug 1 1
+}
+
+finalize_locally
diff --git a/tools/finalization/step-1.sh b/tools/finalization/step-1.sh
new file mode 100755
index 0000000..0dd4b3a
--- /dev/null
+++ b/tools/finalization/step-1.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+# Script to perform a 1st step of Android Finalization: API/SDK finalization, create CLs and upload to Gerrit.
+
+set -ex
+
+function commit_step_1_changes() {
+    set +e
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization" ;
+            git add -A . ;
+            git commit -m "$FINAL_PLATFORM_CODENAME is now $FINAL_PLATFORM_SDK_VERSION and extension version $FINAL_MAINLINE_EXTENSION" \
+                       -m "Ignore-AOSP-First: $FINAL_PLATFORM_CODENAME Finalization
+Bug: $FINAL_BUG_ID
+Test: build";
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function finalize_step_1_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # vndk etc finalization
+    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+
+    # move all changes to finalization branch/topic and upload to gerrit
+    commit_step_1_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+finalize_step_1_main
diff --git a/tools/finalization/step-2.sh b/tools/finalization/step-2.sh
new file mode 100755
index 0000000..d0b24ae
--- /dev/null
+++ b/tools/finalization/step-2.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+# Script to perform a 2nd step of Android Finalization: REL finalization, create CLs and upload to Gerrit.
+
+function commit_step_2_changes() {
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization-Rel" ;
+            git add -A . ;
+            git commit -m "$FINAL_PLATFORM_CODENAME/$FINAL_PLATFORM_SDK_VERSION is now REL" \
+                       -m "Ignore-AOSP-First: $FINAL_PLATFORM_CODENAME Finalization
+Bug: $FINAL_BUG_ID
+Test: build";
+
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function finalize_step_2_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # prebuilts etc
+    source $top/build/make/tools/finalization/finalize-sdk-rel.sh
+
+    # move all changes to finalization branch/topic and upload to gerrit
+    commit_step_2_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+finalize_step_2_main
diff --git a/tools/finalization/update-step-1.sh b/tools/finalization/update-step-1.sh
new file mode 100755
index 0000000..b469988
--- /dev/null
+++ b/tools/finalization/update-step-1.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# Script to perform a 1st step of Android Finalization: API/SDK finalization, update CLs and upload to Gerrit.
+
+# WIP, does not work yet
+exit 10
+
+set -ex
+
+function update_step_1_changes() {
+    set +e
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            git stash -u ;
+            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization" ;
+            git stash pop ;
+            git add -A . ;
+            git commit --amend --no-edit ;
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function update_step_1_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # vndk etc finalization
+    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+
+    # update existing CLs and upload to gerrit
+    update_step_1_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+update_step_1_main
diff --git a/tools/finalization/update-step-2.sh b/tools/finalization/update-step-2.sh
new file mode 100755
index 0000000..d2b8592
--- /dev/null
+++ b/tools/finalization/update-step-2.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Script to perform a 2nd step of Android Finalization: REL finalization, create CLs and upload to Gerrit.
+
+# WIP, does not work yet
+exit 10
+
+set -ex
+
+function update_step_2_changes() {
+    set +e
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            git stash -u ;
+            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization-Rel" ;
+            git stash pop ;
+            git add -A . ;
+            git commit --amend --no-edit ;
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function update_step_2_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # prebuilts etc
+    source $top/build/make/tools/finalization/finalize-sdk-rel.sh
+
+    # move all changes to finalization branch/topic and upload to gerrit
+    update_step_2_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+update_step_2_main
diff --git a/tools/generate_gts_shared_report.py b/tools/generate_gts_shared_report.py
new file mode 100644
index 0000000..11c9364
--- /dev/null
+++ b/tools/generate_gts_shared_report.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Checks and generates a report for gts modules that should be open-sourced.
+
+Usage:
+  generate_gts_open_source_report.py
+    --gtsv-metalic [gts-verifier meta_lic]
+    --gts-test-metalic [android-gts meta_lic]
+    --checkshare [COMPLIANCE_CHECKSHARE]
+    --gts-test-dir [directory of android-gts]
+    --output [output file]
+
+Output example:
+  GTS-Verifier: PASS/FAIL
+  GTS-Modules: PASS/FAIL
+    GtsIncrementalInstallTestCases_BackgroundProcess
+    GtsUnsignedNetworkStackTestCases
+"""
+import sys
+import argparse
+import subprocess
+import re
+
+def _get_args():
+    """Parses input arguments."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '--gtsv-metalic', required=True,
+        help='license meta_lic file path of gts-verifier.zip')
+    parser.add_argument(
+        '--gts-test-metalic', required=True,
+        help='license meta_lic file path of android-gts.zip')
+    parser.add_argument(
+        '--checkshare', required=True,
+        help='path of the COMPLIANCE_CHECKSHARE tool')
+    parser.add_argument(
+        '--gts-test-dir', required=True,
+        help='directory of android-gts')
+    parser.add_argument(
+        '-o', '--output', required=True,
+        help='file path of the output report')
+    return parser.parse_args()
+
+def _check_gtsv(checkshare: str, gtsv_metalic: str) -> str:
+    """Checks gts-verifier license.
+
+    Args:
+      checkshare: path of the COMPLIANCE_CHECKSHARE tool
+      gtsv_metalic: license meta_lic file path of gts-verifier.zip
+
+    Returns:
+      PASS when gts-verifier.zip doesn't need to be shared, and FAIL
+      when gts-verifier.zip needs to be shared.
+    """
+    cmd = f'{checkshare} {gtsv_metalic}'
+    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+    proc.communicate()
+    return 'PASS' if proc.returncode == 0 else 'FAIL'
+
+def _check_gts_test(checkshare: str, gts_test_metalic: str,
+                    gts_test_dir: str) -> tuple[str, set[str]]:
+    """Checks android-gts license.
+
+    Args:
+      checkshare: path of the COMPLIANCE_CHECKSHARE tool
+      gts_test_metalic: license meta_lic file path of android-gts.zip
+      gts_test_dir: directory of android-gts
+
+    Returns:
+      Check result (PASS when android-gts doesn't need to be shared,
+      FAIL when some gts modules need to be shared) and gts modules
+      that need to be shared.
+    """
+    cmd = f'{checkshare} {gts_test_metalic}'
+    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+    _, str_stderr = map(lambda b: b.decode(), proc.communicate())
+    if proc.returncode == 0:
+        return 'PASS', set()
+    open_source_modules = set()
+    for error_line in str_stderr.split('\n'):
+        # Skip empty lines.
+        if not error_line:
+            continue
+        module_meta_lic = error_line.strip().split()[0]
+        groups = re.fullmatch(
+            re.compile(f'.*/{gts_test_dir}/(.*)'), module_meta_lic)
+        if groups:
+            open_source_modules.add(
+                groups[1].removesuffix('.meta_lic'))
+    return 'FAIL', open_source_modules
+
+
+def main(argv):
+    args = _get_args()
+
+    gtsv_metalic = args.gtsv_metalic
+    gts_test_metalic = args.gts_test_metalic
+    output_file = args.output
+    checkshare = args.checkshare
+    gts_test_dir = args.gts_test_dir
+
+    with open(output_file, 'w') as file:
+        result = _check_gtsv(checkshare, gtsv_metalic)
+        file.write(f'GTS-Verifier: {result}\n')
+        result, open_source_modules = _check_gts_test(
+            checkshare, gts_test_metalic, gts_test_dir)
+        file.write(f'GTS-Modules: {result}\n')
+        for open_source_module in open_source_modules:
+            file.write(f'\t{open_source_module}\n')
+
+if __name__ == "__main__":
+    main(sys.argv)
\ No newline at end of file
diff --git a/tools/list_files.py b/tools/list_files.py
new file mode 100644
index 0000000..3afa81f
--- /dev/null
+++ b/tools/list_files.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import List
+from glob import glob
+from pathlib import Path
+from os.path import join, relpath
+import argparse
+
+class FileLister:
+    def __init__(self, args) -> None:
+        self.out_file = args.out_file
+
+        self.folder_dir = args.dir
+        self.extensions = [e if e.startswith(".") else "." + e for e in args.extensions]
+        self.root = args.root
+        self.files_list = list()
+
+    def get_files(self) -> None:
+        """Get all files directory in the input directory including the files in the subdirectories
+
+        Recursively finds all files in the input directory.
+        Sets files_list to a list of file path strings that
+        contains only files, not directories.
+        The list is sorted alphabetically by file path.
+
+        Args:
+            self.folder_dir: Directory to list files from. String.
+
+        Raises:
+            FileNotFoundError: If the input directory does not exist.
+        """
+
+        if not dir_exists(self.folder_dir):
+            raise FileNotFoundError(f"Directory {self.folder_dir} does not exist")
+
+        if self.folder_dir[-2:] != "**":
+            self.folder_dir = join(self.folder_dir, "**")
+
+        self.files_list = list()
+        for file in sorted(glob(self.folder_dir, recursive=True)):
+            if Path(file).is_file():
+                if self.root:
+                    file = join(self.root, relpath(file, self.folder_dir[:-2]))
+                self.files_list.append(file)
+
+
+    def list(self) -> None:
+        self.get_files()
+        self.files_list = [f for f in self.files_list if not self.extensions or Path(f).suffix in self.extensions]
+        self.write()
+
+    def write(self) -> None:
+        if self.out_file == "":
+            pprint(self.files_list)
+        else:
+            write_lines(self.out_file, self.files_list)
+
+###
+# Helper functions
+###
+def pprint(l: List[str]) -> None:
+    for line in l:
+        print(line)
+
+def dir_exists(dir: str) -> bool:
+    return Path(dir).exists()
+
+def write_lines(out_file: str, lines: List[str]) -> None:
+    with open(out_file, "w+") as f:
+        f.writelines(line + '\n' for line in lines)
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('dir', action='store', type=str,
+                        help="directory to list all subdirectory files")
+    parser.add_argument('--out', dest='out_file',
+                        action='store', default="", type=str,
+                        help="optional directory to write subdirectory files. If not set, will print to console")
+    parser.add_argument('--root', dest='root',
+                        action='store', default="", type=str,
+                        help="optional directory to replace the root directories of output.")
+    parser.add_argument('--extensions', nargs='*', default=list(), dest='extensions',
+                        help="Extensions to include in the output. If not set, all files are included")
+
+    args = parser.parse_args()
+
+    file_lister = FileLister(args)
+    file_lister.list()
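+
+    # Hypothetical example invocation (illustrative only; uses the flags
+    # defined above):
+    #   python list_files.py frameworks/base/core --extensions .java .kt \
+    #       --root //frameworks/base/core --out java_kt_files.txt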
diff --git a/tools/post_process_props.py b/tools/post_process_props.py
index 38d17a8..31a460d 100755
--- a/tools/post_process_props.py
+++ b/tools/post_process_props.py
@@ -43,7 +43,7 @@
   """Validate GRF properties if exist.
 
   If ro.board.first_api_level is defined, check if its value is valid for the
-  sdk version.
+  sdk version. This is only for the release version.
   Also, validate the value of ro.board.api_level if defined.
 
   Returns:
@@ -51,6 +51,7 @@
   """
   grf_api_level = prop_list.get_value("ro.board.first_api_level")
   board_api_level = prop_list.get_value("ro.board.api_level")
+  platform_version_codename = prop_list.get_value("ro.build.version.codename")
 
   if not grf_api_level:
     if board_api_level:
@@ -61,6 +62,18 @@
     return True
 
   grf_api_level = int(grf_api_level)
+  if board_api_level:
+    board_api_level = int(board_api_level)
+    if board_api_level < grf_api_level:
+      sys.stderr.write("error: ro.board.api_level(%d) must be greater than "
+                       "ro.board.first_api_level(%d)\n"
+                       % (board_api_level, grf_api_level))
+      return False
+
+  # skip sdk version validation for dev-stage non-REL devices
+  if platform_version_codename != "REL":
+    return True
+
   if grf_api_level > sdk_version:
     sys.stderr.write("error: ro.board.first_api_level(%d) must be less than "
                      "or equal to ro.build.version.sdk(%d)\n"
@@ -68,12 +81,10 @@
     return False
 
   if board_api_level:
-    board_api_level = int(board_api_level)
-    if board_api_level < grf_api_level or board_api_level > sdk_version:
-      sys.stderr.write("error: ro.board.api_level(%d) must be neither less "
-                       "than ro.board.first_api_level(%d) nor greater than "
-                       "ro.build.version.sdk(%d)\n"
-                       % (board_api_level, grf_api_level, sdk_version))
+    if board_api_level > sdk_version:
+      sys.stderr.write("error: ro.board.api_level(%d) must be less than or "
+                       "equal to ro.build.version.sdk(%d)\n"
+                       % (board_api_level, sdk_version))
       return False
 
   return True
diff --git a/tools/protos/Android.bp b/tools/protos/Android.bp
new file mode 100644
index 0000000..c6ad19e
--- /dev/null
+++ b/tools/protos/Android.bp
@@ -0,0 +1,32 @@
+// Copyright 2023 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_library_host {
+    name: "metadata_file_proto_py",
+    version: {
+        py3: {
+            enabled: true,
+        },
+    },
+    srcs: [
+        "metadata_file.proto",
+    ],
+    proto: {
+        canonical_path_from_root: false,
+    },
+}
diff --git a/tools/protos/metadata_file.proto b/tools/protos/metadata_file.proto
new file mode 100644
index 0000000..ac1129a
--- /dev/null
+++ b/tools/protos/metadata_file.proto
@@ -0,0 +1,281 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package metadata_file;
+
+// Proto definition of METADATA files of packages in AOSP codebase.
+message Metadata {
+  // Name of the package.
+  optional string name = 1;
+
+  // A short description (a few lines) of the package.
+  // Example: "Handles location lookups, throttling, batching, etc."
+  optional string description = 2;
+
+  // Specifies additional data about third-party packages.
+  optional ThirdParty third_party = 3;
+}
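+
+// Illustrative example of a METADATA file using this schema (the values are
+// hypothetical):
+//
+//   name: "libfoo"
+//   description: "Example third-party library for parsing foo files."
+//   third_party {
+//     url {
+//       type: GIT
+//       value: "https://github.com/example/libfoo.git"
+//     }
+//     version: "v1.2.3"
+//     license_type: NOTICE
+//   }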
+
+message ThirdParty {
+  // URL(s) associated with the package.
+  //
+  // At a minimum, all packages must specify a URL which identifies where it
+  // came from, containing a type of: ARCHIVE, GIT or OTHER. Typically,
+  // a package should contain only a single URL from these types.  Occasionally,
+  // a package may be broken across multiple archive files for whatever reason,
+  // in which case having multiple ARCHIVE URLs is okay.  However, this should
+  // not be used to combine different logical packages that are versioned and
+  // possibly licensed differently.
+  repeated URL url = 1;
+
+  // The package version.  In order of preference, this should contain:
+  //  - If the package comes from Git or another source control system,
+  //    a specific tag or revision in source control, such as "r123" or
+  //    "58e27d2".  This MUST NOT be a mutable ref such as a branch name.
+  //  - a released package version such as "1.0", "2.3-beta", etc.
+  //  - the date the package was retrieved, formatted as "As of YYYY-MM-DD".
+  optional string version = 2;
+
+  // The date of the change in which the package was last upgraded from
+  // upstream.
+  // This should only identify package upgrades from upstream, not local
+  // modifications. This may identify the date of either the original or
+  // merged change.
+  //
+  // Note: this is NOT the date that this version of the package was released
+  // externally.
+  optional Date last_upgrade_date = 3;
+
+  // License type that identifies how the package may be used.
+  optional LicenseType license_type = 4;
+
+  // An additional note explaining the licensing of this package.  This is most
+  // commonly used with commercial license.
+  optional string license_note = 5;
+
+  // Description of local changes that have been made to the package.  This does
+  // not need to (and in most cases should not) attempt to include an exhaustive
+  // list of all changes, but may instead direct readers to review the local
+  // commit history, a collection of patch files, a separate README.md (or
+  // similar) document, etc.
+  // Note: Use of this field to store IDs of advisories fixed with a backported
+  // patch is deprecated, use "security.mitigated_security_patch" instead.
+  optional string local_modifications = 6;
+
+  // Security related metadata including risk category and any special
+  // instructions for using the package, as determined by an ISE-TPS review.
+  optional Security security = 7;
+
+  // The type of directory this metadata represents.
+  optional DirectoryType type = 8 [default = PACKAGE];
+
+  // The homepage for the package. This will eventually replace
+  // `url { type: HOMEPAGE }`
+  optional string homepage = 9;
+
+  // SBOM information of the package. It is mandatory for prebuilt packages.
+  oneof sbom {
+    // Reference to external SBOM document provided as URL.
+    SBOMRef sbom_ref = 10;
+  }
+
+}
+
+// URL associated with a third-party package.
+message URL {
+  enum Type {
+    // The homepage for the package. For example, "https://bazel.io/". This URL
+    // is optional, but encouraged to help disambiguate similarly named packages
+    // or to get more information about the package. This is especially helpful
+    // when no other URLs provide human readable resources (such as git:// or
+    // sso:// URLs).
+    HOMEPAGE = 1;
+
+    // The URL of the archive containing the source code for the package, for
+    // example a zip or tgz file.
+    ARCHIVE = 2;
+
+    // The URL of the upstream git repository this package is retrieved from.
+    // For example:
+    //  - https://github.com/git/git.git
+    //  - git://git.kernel.org/pub/scm/git/git.git
+    //
+    // Use of a git URL requires that the package "version" value must specify a
+    // specific git tag or revision.
+    GIT = 3;
+
+    // The URL of the upstream SVN repository this package is retrieved from.
+    // For example:
+    //  - http://llvm.org/svn/llvm-project/llvm/
+    //
+    // Use of an SVN URL requires that the package "version" value must specify
+    // a specific SVN tag or revision.
+    SVN = 4;
+
+    // The URL of the upstream mercurial repository this package is retrieved
+    // from. For example:
+    //   - https://mercurial-scm.org/repo/evolve
+    //
+    // Use of a mercurial URL requires that the package "version" value must
+    // specify a specific tag or revision.
+    HG = 5;
+
+    // The URL of the upstream darcs repository this package is retrieved
+    // from. For example:
+    //   - https://hub.darcs.net/hu.dwim/hu.dwim.util
+    //
+    // Use of a DARCS URL requires that the package "version" value must
+    // specify a specific tag or revision.
+    DARCS = 6;
+
+    PIPER = 7;
+
+    // A URL that does not fit any other type. This may also indicate that the
+    // source code was received via email or some other out-of-band way. This is
+    // most commonly used with commercial software received directly from the
+    // vendor. In the case of email, the URL value can be used to provide
+    // additional information about how it was received.
+    OTHER = 8;
+
+    // The URL identifying where the local copy of the package source code can
+    // be found.
+    //
+    // Typically, the metadata files describing a package reside in the same
+    // directory as the source code for the package. In a few rare cases where
+    // they are separate, the LOCAL_SOURCE URL identifies where to find the
+    // source code. This only describes where to find the local copy of the
+    // source; there should always be an additional URL describing where the
+    // package was retrieved from.
+    //
+    // Examples:
+    //  - https://android.googlesource.com/platform/external/apache-http/
+    LOCAL_SOURCE = 9;
+  }
+
+  // The type of resource this URL identifies.
+  optional Type type = 1;
+
+  // The actual URL value.  URLs should be absolute and start with 'http://' or
+  // 'https://' (or occasionally 'git://' or 'ftp://' where appropriate).
+  optional string value = 2;
+}
+
+// License type that identifies how the packages may be used.
+enum LicenseType {
+  BY_EXCEPTION_ONLY = 1;
+  NOTICE = 2;
+  PERMISSIVE = 3;
+  RECIPROCAL = 4;
+  RESTRICTED_IF_STATICALLY_LINKED = 5;
+  RESTRICTED = 6;
+  UNENCUMBERED = 7;
+}
+
+// Identifies security related metadata including risk category and any special
+// instructions for using the package.
+message Security {
+  // Security risk category for a package, as determined by an ISE-TPS review.
+  enum Category {
+    CATEGORY_UNSPECIFIED = 0;
+
+    // Package should only be used in a sandboxed environment.
+    // Package should have restricted visibility.
+    SANDBOXED_ONLY = 1;
+
+    // Package should not be used to process user content. It is considered
+    // safe to use to process trusted data only. Package should have restricted
+    // visibility.
+    TRUSTED_DATA_ONLY = 2;
+
+    // Package is considered safe to use.
+    REVIEWED_AND_SECURE = 3;
+  }
+
+  // Identifies the security risk category for the package.  This will be
+  // provided by the ISE-TPS team as the result of a security review of the
+  // package.
+  optional Category category = 1;
+
+  // An additional security note for the package.
+  optional string note = 2;
+
+  // Text tags used to categorize the package. They are currently used by
+  // security:
+  // - to disable OSV (https://osv.dev) support via the `OSV:disable` tag
+  // - to attach CPEs to their corresponding packages, for vulnerability
+  //   monitoring
+  //
+  // Please document your use case here should you want to add one.
+  repeated string tag = 3;
+
+  // ID of advisories fixed with a mitigated patch, for example CVE-2018-1111.
+  repeated string mitigated_security_patch = 4;
+}
+
+enum DirectoryType {
+  UNDEFINED = 0;
+
+  // This directory represents a package.
+  PACKAGE = 1;
+
+  // This directory is designed to organize multiple third-party PACKAGE
+  // directories.
+  GROUP = 2;
+
+  // This directory contains several PACKAGE directories representing
+  // different versions of the same third-party project.
+  VERSIONS = 3;
+}
+
+// Represents a whole or partial calendar date, such as a birthday. The time of
+// day and time zone are either specified elsewhere or are insignificant. The
+// date is relative to the Gregorian Calendar. This can represent one of the
+// following:
+//
+// * A full date, with non-zero year, month, and day values.
+// * A month and day, with a zero year (for example, an anniversary).
+// * A year on its own, with a zero month and a zero day.
+// * A year and month, with a zero day (for example, a credit card expiration
+//   date).
+message Date {
+  // Year of the date. Must be from 1 to 9999, or 0 to specify a date without
+  // a year.
+  optional int32 year = 1;
+  // Month of a year. Must be from 1 to 12, or 0 to specify a year without a
+  // month and day.
+  optional int32 month = 2;
+  // Day of a month. Must be from 1 to 31 and valid for the year and month, or 0
+  // to specify a year by itself or a year and month where the day isn't
+  // significant.
+  optional int32 day = 3;
+}
+
+// Reference to external SBOM document and element corresponding to the package.
+// See https://spdx.github.io/spdx-spec/v2.3/document-creation-information/#66-external-document-references-field
+message SBOMRef {
+  // The URL that points to the SBOM document of the upstream package of this
+  // third_party package.
+  optional string url = 1;
+  // Checksum of the SBOM document the url field points to.
+  // Format: e.g. SHA1:<checksum>, or any algorithm defined in
+  // https://spdx.github.io/spdx-spec/v2.3/file-information/#8.4
+  optional string checksum = 2;
+  // SPDXID of the upstream package/file defined in the SBOM document the url field points to.
+  // Format: SPDXRef-[a-zA-Z0-9.-]+, see
+  // https://spdx.github.io/spdx-spec/v2.3/package-information/#72-package-spdx-identifier-field or
+  // https://spdx.github.io/spdx-spec/v2.3/file-information/#82-file-spdx-identifier-field
+  optional string element_id = 3;
+}
\ No newline at end of file
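
The Date comments above allow whole or partial dates. The following is a minimal sketch of a validator that mirrors those rules; the helper name is hypothetical and is not part of the schema tooling:
```
import calendar


def is_valid_metadata_date(year, month, day):
  """Checks a Date against the field comments above (illustrative sketch)."""
  # Year may be 0 (date without a year) or 1-9999.
  if not (year == 0 or 1 <= year <= 9999):
    return False
  # Month may be 0 only when the day is also 0 (year-only date).
  if month == 0:
    return day == 0
  if not 1 <= month <= 12:
    return False
  # Day may be 0 (year only, or year and month only); otherwise it must be
  # valid for the month. Use a leap year when the year is unspecified.
  if day == 0:
    return True
  return 1 <= day <= calendar.monthrange(year or 2000, month)[1]
```
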
diff --git a/tools/rbcrun/Android.bp b/tools/rbcrun/Android.bp
index 90173ac..fcc33ef 100644
--- a/tools/rbcrun/Android.bp
+++ b/tools/rbcrun/Android.bp
@@ -19,7 +19,7 @@
 
 blueprint_go_binary {
     name: "rbcrun",
-    srcs: ["cmd/rbcrun.go"],
+    srcs: ["rbcrun/rbcrun.go"],
     deps: ["rbcrun-module"],
 }
 
diff --git a/tools/rbcrun/go.mod b/tools/rbcrun/go.mod
index a029eb4..5ae2972 100644
--- a/tools/rbcrun/go.mod
+++ b/tools/rbcrun/go.mod
@@ -1,9 +1,6 @@
 module rbcrun
 
-require (
-	github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d // indirect
-	go.starlark.net v0.0.0-20201006213952-227f4aabceb5
-)
+require go.starlark.net v0.0.0-20201006213952-227f4aabceb5
 
 replace go.starlark.net => ../../../../external/starlark-go
 
diff --git a/tools/rbcrun/go.sum b/tools/rbcrun/go.sum
index db4d51e..10761a8 100644
--- a/tools/rbcrun/go.sum
+++ b/tools/rbcrun/go.sum
@@ -1,11 +1,8 @@
 cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=
 github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
-github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8=
 github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
-github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1 h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8=
 github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -26,8 +23,6 @@
 github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E=
-github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d/go.mod h1:o96djdrsSGy3AWPyBgZMAGfxZNfgntdJG+11KU4QvbU=
 github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@@ -44,9 +39,6 @@
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae h1:Ih9Yo4hSPImZOpfGuA4bR/ORKTAbhZo2AbWNRCnevdo=
-golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA=
 golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
diff --git a/tools/rbcrun/cmd/rbcrun.go b/tools/rbcrun/rbcrun/rbcrun.go
similarity index 100%
rename from tools/rbcrun/cmd/rbcrun.go
rename to tools/rbcrun/rbcrun/rbcrun.go
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 29fc771..a76dc8a 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -334,6 +334,9 @@
         "ota_utils.py",
         "payload_signer.py",
     ],
+    libs: [
+        "releasetools_common",
+    ],
 }
 
 python_binary_host {
@@ -356,6 +359,21 @@
 }
 
 python_binary_host {
+    name: "create_brick_ota",
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    srcs: [
+        "create_brick_ota.py",
+    ],
+    libs: [
+        "ota_utils_lib",
+    ],
+}
+
+python_binary_host {
     name: "build_image",
     defaults: [
         "releasetools_binary_defaults",
@@ -598,6 +616,7 @@
         "testdata/**/*",
         ":com.android.apex.compressed.v1",
         ":com.android.apex.compressed.v1_original",
+        ":com.android.apex.vendor.foo.with_vintf"
     ],
     target: {
         darwin: {
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index 465e1a8..e0bcbb7 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -1002,7 +1002,8 @@
   if has_init_boot:
     banner("init_boot")
     init_boot_image = common.GetBootableImage(
-        "IMAGES/init_boot.img", "init_boot.img", OPTIONS.input_tmp, "INIT_BOOT")
+        "IMAGES/init_boot.img", "init_boot.img", OPTIONS.input_tmp, "INIT_BOOT",
+        dev_nodes=True)
     if init_boot_image:
       partitions['init_boot'] = os.path.join(
           OPTIONS.input_tmp, "IMAGES", "init_boot.img")
@@ -1133,6 +1134,21 @@
           item for item in vbmeta_partitions
           if item not in vbmeta_vendor.split()]
       vbmeta_partitions.append("vbmeta_vendor")
+    custom_avb_partitions = OPTIONS.info_dict.get(
+        "avb_custom_vbmeta_images_partition_list", "").strip().split()
+    if custom_avb_partitions:
+      for avb_part in custom_avb_partitions:
+        partition_name = "vbmeta_" + avb_part
+        included_partitions = OPTIONS.info_dict.get(
+            "avb_vbmeta_{}".format(avb_part), "").strip().split()
+        assert included_partitions, \
+            "Custom vbmeta partition {0} missing avb_vbmeta_{0} prop".format(
+                avb_part)
+        banner(partition_name)
+        logger.info("VBMeta partition {} needs {}".format(
+            partition_name, included_partitions))
+        partitions[partition_name] = AddVBMeta(
+            output_zip, partitions, partition_name, included_partitions)
+        vbmeta_partitions = [
+            item for item in vbmeta_partitions
+            if item not in included_partitions]
+        vbmeta_partitions.append(partition_name)
+
 
     if OPTIONS.info_dict.get("avb_building_vbmeta_image") == "true":
       banner("vbmeta")
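
The hunk above builds one chained vbmeta image per entry in `avb_custom_vbmeta_images_partition_list`, pulling the partitions it covers from a matching `avb_vbmeta_<name>` property. A minimal sketch of that lookup, using hypothetical property values (`foo`, `bar`, `baz` are made up for illustration):
```
# Hypothetical misc_info.txt / info_dict values, for illustration only.
info_dict = {
    "avb_custom_vbmeta_images_partition_list": "foo",
    "avb_vbmeta_foo": "bar baz",
}

for avb_part in info_dict["avb_custom_vbmeta_images_partition_list"].split():
  partition_name = "vbmeta_" + avb_part                   # "vbmeta_foo"
  included = info_dict["avb_vbmeta_" + avb_part].split()  # ["bar", "baz"]
  # add_img_to_target_files builds vbmeta_foo from bar.img and baz.img, then
  # chains vbmeta_foo into the top-level vbmeta in place of bar and baz.
  print(partition_name, included)
```
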
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index d7b0ba2..59c712e 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -63,6 +63,10 @@
     self.codename_to_api_level_map = codename_to_api_level_map
     self.debugfs_path = os.path.join(
         OPTIONS.search_path, "bin", "debugfs_static")
+    self.fsckerofs_path = os.path.join(
+        OPTIONS.search_path, "bin", "fsck.erofs")
+    self.blkid_path = os.path.join(
+        OPTIONS.search_path, "bin", "blkid_static")
     self.avbtool = avbtool if avbtool else "avbtool"
     self.sign_tool = sign_tool
 
@@ -80,8 +84,8 @@
           "Couldn't find location of debugfs_static: " +
           "Path {} does not exist. ".format(self.debugfs_path) +
           "Make sure bin/debugfs_static can be found in -p <path>")
-    list_cmd = ['deapexer', '--debugfs_path',
-                self.debugfs_path, 'list', self.apex_path]
+    list_cmd = ['deapexer', '--debugfs_path', self.debugfs_path,
+                'list', self.apex_path]
     entries_names = common.RunAndCheckOutput(list_cmd).split()
     apk_entries = [name for name in entries_names if name.endswith('.apk')]
     sepolicy_entries = []
@@ -120,9 +124,21 @@
           "Couldn't find location of debugfs_static: " +
           "Path {} does not exist. ".format(self.debugfs_path) +
           "Make sure bin/debugfs_static can be found in -p <path>")
+    if not os.path.exists(self.fsckerofs_path):
+      raise ApexSigningError(
+          "Couldn't find location of fsck.erofs: " +
+          "Path {} does not exist. ".format(self.fsckerofs_path) +
+          "Make sure bin/fsck.erofs can be found in -p <path>")
+    if not os.path.exists(self.blkid_path):
+      raise ApexSigningError(
+          "Couldn't find location of blkid: " +
+          "Path {} does not exist. ".format(self.blkid_path) +
+          "Make sure bin/blkid can be found in -p <path>")
     payload_dir = common.MakeTempDir()
-    extract_cmd = ['deapexer', '--debugfs_path',
-                   self.debugfs_path, 'extract', self.apex_path, payload_dir]
+    extract_cmd = ['deapexer', '--debugfs_path', self.debugfs_path,
+                   '--fsckerofs_path', self.fsckerofs_path,
+                   '--blkid_path', self.blkid_path, 'extract',
+                   self.apex_path, payload_dir]
     common.RunAndCheckOutput(extract_cmd)
     assert os.path.exists(self.apex_path)
 
@@ -603,12 +619,14 @@
   debugfs_path = "debugfs"
   if OPTIONS.search_path:
     debugfs_path = os.path.join(OPTIONS.search_path, "bin", "debugfs_static")
+
   deapexer = 'deapexer'
   if OPTIONS.search_path:
     deapexer_path = os.path.join(OPTIONS.search_path, "bin", "deapexer")
     if os.path.isfile(deapexer_path):
       deapexer = deapexer_path
-  for apex_filename in os.listdir(target_dir):
+
+  for apex_filename in sorted(os.listdir(target_dir)):
     apex_filepath = os.path.join(target_dir, apex_filename)
     if not os.path.isfile(apex_filepath) or \
             not zipfile.is_zipfile(apex_filepath):
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index 211182a..8087fcd 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -1171,7 +1171,7 @@
           try:
             # Compresses with the default level
             compress_obj = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
-            compressed_data = (compress_obj.compress("".join(tgt_data))
+            compressed_data = (compress_obj.compress(b"".join(tgt_data))
                                + compress_obj.flush())
             compressed_size = len(compressed_data)
           except zlib.error as e:
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 7639ffd..9064136 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -232,11 +232,13 @@
             mount_point, total_blocks, used_blocks, headroom_blocks,
             adjusted_blocks))
 
+
 def CalculateSizeAndReserved(prop_dict, size):
   fs_type = prop_dict.get("fs_type", "")
   partition_headroom = int(prop_dict.get("partition_headroom", 0))
   # If not specified, give us 16MB margin for GetDiskUsage error ...
-  reserved_size = int(prop_dict.get("partition_reserved_size", BYTES_IN_MB * 16))
+  reserved_size = int(prop_dict.get(
+      "partition_reserved_size", BYTES_IN_MB * 16))
 
   if fs_type == "erofs":
     reserved_size = int(prop_dict.get("partition_reserved_size", 0))
@@ -249,6 +251,7 @@
 
   return size + reserved_size
 
+
 def BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config):
   """Builds a pure image for the files under in_dir and writes it to out_file.
 
@@ -410,7 +413,7 @@
       build_command.append("--casefold")
     if (needs_compress or prop_dict.get("f2fs_compress") == "true"):
       build_command.append("--compression")
-    if (prop_dict.get("mount_point") != "data"):
+    if "ro_mount_point" in prop_dict:
       build_command.append("--readonly")
     if (prop_dict.get("f2fs_compress") == "true"):
       build_command.append("--sldc")
@@ -518,11 +521,12 @@
   disable_sparse = "disable_sparse" in prop_dict
   mkfs_output = None
   if (prop_dict.get("use_dynamic_partition_size") == "true" and
-      "partition_size" not in prop_dict):
+          "partition_size" not in prop_dict):
     # If partition_size is not defined, use output of `du' + reserved_size.
     # For compressed file system, it's better to use the compressed size to avoid wasting space.
     if fs_type.startswith("erofs"):
-      mkfs_output = BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config)
+      mkfs_output = BuildImageMkfs(
+          in_dir, prop_dict, out_file, target_out, fs_config)
       if "erofs_sparse_flag" in prop_dict and not disable_sparse:
         image_path = UnsparseImage(out_file, replace=False)
         size = GetDiskUsage(image_path)
@@ -612,7 +616,8 @@
     prop_dict["image_size"] = str(max_image_size)
 
   if not mkfs_output:
-    mkfs_output = BuildImageMkfs(in_dir, prop_dict, out_file, target_out, fs_config)
+    mkfs_output = BuildImageMkfs(
+        in_dir, prop_dict, out_file, target_out, fs_config)
 
   # Update the image (eg filesystem size). This can be different eg if mkfs
   # rounds the requested size down due to alignment.
@@ -629,6 +634,7 @@
   if verity_image_builder:
     verity_image_builder.Build(out_file)
 
+
 def ImagePropFromGlobalDict(glob_dict, mount_point):
   """Build an image property dictionary from the global dictionary.
 
@@ -738,7 +744,7 @@
       # This property is legacy and only used on a few partitions. b/202600377
       allowed_partitions = set(["system", "system_other", "data", "oem"])
       if mount_point not in allowed_partitions:
-          continue
+        continue
 
     if (mount_point == "system_other") and (dest_prop != "partition_size"):
       # Propagate system properties to system_other. They'll get overridden
@@ -757,6 +763,8 @@
     if not copy_prop(prop, "extfs_rsv_pct"):
       d["extfs_rsv_pct"] = "0"
 
+    d["ro_mount_point"] = "1"
+
   # Copy partition-specific properties.
   d["mount_point"] = mount_point
   if mount_point == "system":
@@ -791,6 +799,7 @@
 
 def GlobalDictFromImageProp(image_prop, mount_point):
   d = {}
+
   def copy_prop(src_p, dest_p):
     if src_p in image_prop:
       d[dest_p] = image_prop[src_p]
@@ -818,6 +827,56 @@
   return d
 
 
+def BuildVBMeta(in_dir, glob_dict, output_path):
+  """Creates a VBMeta image.
+
+  It generates the requested VBMeta image, which can be either the top-level
+  VBMeta image or a chained VBMeta image; this is determined from the name of
+  the output file.
+
+  Args:
+    in_dir: Path to the directory that contains the partition images whose
+        descriptors should be included in the generated VBMeta image.
+    glob_dict: The global property dictionary, used to look up the
+        avb_vbmeta_system and avb_vbmeta_vendor properties that define the
+        chained VBMeta partitions.
+    output_path: Path to the generated VBMeta image, e.g. vbmeta.img or
+        vbmeta_system.img.
+
+  Raises:
+    AssertionError: On invalid input args.
+  """
+  vbmeta_partitions = common.AVB_PARTITIONS[:]
+  name = os.path.basename(output_path)
+  if name.endswith(".img"):
+    name = name[:-len(".img")]
+  vbmeta_system = glob_dict.get("avb_vbmeta_system", "").strip()
+  vbmeta_vendor = glob_dict.get("avb_vbmeta_vendor", "").strip()
+  if "vbmeta_system" in name:
+    vbmeta_partitions = vbmeta_system.split()
+  elif "vbmeta_vendor" in name:
+    vbmeta_partitions = vbmeta_vendor.split()
+  else:
+    if vbmeta_system:
+      vbmeta_partitions = [
+          item for item in vbmeta_partitions
+          if item not in vbmeta_system.split()]
+      vbmeta_partitions.append("vbmeta_system")
+
+    if vbmeta_vendor:
+      vbmeta_partitions = [
+          item for item in vbmeta_partitions
+          if item not in vbmeta_vendor.split()]
+      vbmeta_partitions.append("vbmeta_vendor")
+
+
+  partitions = {part: os.path.join(in_dir, part + ".img")
+                for part in vbmeta_partitions}
+  partitions = {part: path for (part, path) in partitions.items()
+                if os.path.exists(path)}
+  common.BuildVBMeta(output_path, partitions, name, vbmeta_partitions)
+
+
 def main(argv):
   args = common.ParseOptions(argv, __doc__)
 
@@ -864,14 +923,21 @@
       mount_point = "product"
     elif image_filename == "system_ext.img":
       mount_point = "system_ext"
+    elif "vbmeta" in image_filename:
+      mount_point = "vbmeta"
     else:
       logger.error("Unknown image file name %s", image_filename)
       sys.exit(1)
 
-    image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
+    if "vbmeta" != mount_point:
+      image_properties = ImagePropFromGlobalDict(glob_dict, mount_point)
 
   try:
-    BuildImage(in_dir, image_properties, out_file, target_out)
+    if "vbmeta" in os.path.basename(out_file):
+      OPTIONS.info_dict = glob_dict
+      BuildVBMeta(in_dir, glob_dict, out_file)
+    else:
+      BuildImage(in_dir, image_properties, out_file, target_out)
   except:
     logger.error("Failed to build %s from %s", out_file, in_dir)
     raise
diff --git a/tools/releasetools/check_target_files_vintf.py b/tools/releasetools/check_target_files_vintf.py
index c369a59..5b71c72 100755
--- a/tools/releasetools/check_target_files_vintf.py
+++ b/tools/releasetools/check_target_files_vintf.py
@@ -129,8 +129,8 @@
 
   dirmap = GetDirmap(input_tmp)
 
-  apex_root, apex_info_file = PrepareApexDirectory(input_tmp)
-  dirmap['/apex'] = apex_root
+  # Simulate apexd from target-files.
+  dirmap['/apex'] = PrepareApexDirectory(input_tmp)
 
   args_for_skus = GetArgsForSkus(info_dict)
   shipping_api_level_args = GetArgsForShippingApiLevel(info_dict)
@@ -140,7 +140,6 @@
       'checkvintf',
       '--check-compat',
   ]
-  common_command += ['--apex-info-file', apex_info_file]
 
   for device_path, real_path in sorted(dirmap.items()):
     common_command += ['--dirmap', '{}:{}'.format(device_path, real_path)]
@@ -206,34 +205,40 @@
   return patterns
 
 def PrepareApexDirectory(inp):
-  """ Prepare the APEX data.
+  """ Prepare /apex directory before running checkvintf
 
   Apex binaries do not support dirmaps, in order to use these binaries we
   need to move the APEXes from the extracted target file archives to the
   expected device locations.
 
-  The APEXes will also be extracted under the APEX/ directory
-  matching what would be on the target.
+  This simulates how apexd activates APEXes.
+  1. create {inp}/APEX which is treated as a "/" on device.
+  2. copy apexes from target-files to {root}/{partition}/apex.
+  3. mount apexes under {root}/{partition}/apex at {root}/apex.
+  4. generate info files with dump_apex_info.
 
-  Create the following structure under the input inp directory:
-       APEX/apex             # Extracted APEXes
-       APEX/system/apex/     # System APEXes
-       APEX/vendor/apex/     # Vendor APEXes
+  We'll get the following layout
+       {inp}/APEX/apex             # Activated APEXes + some info files
+       {inp}/APEX/system/apex      # System APEXes
+       {inp}/APEX/vendor/apex      # Vendor APEXes
        ...
 
   Args:
     inp: path to the directory that contains the extracted target files archive.
 
   Returns:
-    extracted apex directory
-    apex-info-list.xml file
+    directory representing /apex on device
   """
 
-  debugfs_path = 'debugfs'
   deapexer = 'deapexer'
+  debugfs_path = 'debugfs'
+  blkid_path = 'blkid'
+  fsckerofs_path = 'fsck.erofs'
   if OPTIONS.search_path:
     debugfs_path = os.path.join(OPTIONS.search_path, 'bin', 'debugfs_static')
     deapexer_path = os.path.join(OPTIONS.search_path, 'bin', 'deapexer')
+    blkid_path = os.path.join(OPTIONS.search_path, 'bin', 'blkid_static')
+    fsckerofs_path = os.path.join(OPTIONS.search_path, 'bin', 'fsck.erofs')
     if os.path.isfile(deapexer_path):
       deapexer = deapexer_path
 
@@ -257,6 +262,8 @@
 
         cmd = [deapexer,
                '--debugfs_path', debugfs_path,
+               '--fsckerofs_path', fsckerofs_path,
+               '--blkid_path', blkid_path,
                'extract',
                apex,
                os.path.join(outp, info['name'])]
@@ -267,35 +274,34 @@
   root_dir_name = 'APEX'
   root_dir = os.path.join(inp, root_dir_name)
   extracted_root = os.path.join(root_dir, 'apex')
-  apex_info_file = os.path.join(extracted_root, 'apex-info-list.xml')
 
-  # Always create APEX directory for dirmap
+  # Always create /apex directory for dirmap
   os.makedirs(extracted_root)
 
   create_info_file = False
 
   # Loop through search path looking for and processing apex/ directories.
   for device_path, target_files_rel_paths in DIR_SEARCH_PATHS.items():
+    # checkvintf only needs vendor APEXes; skip other partitions for efficiency.
+    if device_path not in ['/vendor', '/odm']:
+      continue
+    # First, copy VENDOR/apex/foo.apex to APEX/vendor/apex/foo.apex
+    # Then, extract the contents to APEX/apex/foo/
     for target_files_rel_path in target_files_rel_paths:
       inp_partition = os.path.join(inp, target_files_rel_path,"apex")
       if os.path.exists(inp_partition):
         apex_dir = root_dir + os.path.join(device_path + "/apex");
-        os.makedirs(apex_dir)
-        os.rename(inp_partition, apex_dir)
+        os.makedirs(root_dir + device_path)
+        shutil.copytree(inp_partition, apex_dir, symlinks=True)
         ExtractApexes(apex_dir, extracted_root)
         create_info_file = True
 
   if create_info_file:
-    ### Create apex-info-list.xml
-    dump_cmd = ['dump_apex_info',
-                '--root_dir', root_dir,
-                '--out_file', apex_info_file]
+    ### Dump apex info files
+    dump_cmd = ['dump_apex_info', '--root_dir', root_dir]
     common.RunAndCheckOutput(dump_cmd)
-    if not os.path.exists(apex_info_file):
-      raise RuntimeError('Failed to create apex info file %s', apex_info_file)
-    logger.info('Created %s', apex_info_file)
 
-  return extracted_root, apex_info_file
+  return extracted_root
 
 def CheckVintfFromTargetFiles(inp, info_dict=None):
   """
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 715802f..a9dcd96 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -302,6 +302,8 @@
   Raises:
     ExternalError: On non-zero exit from the command.
   """
+  if verbose is None:
+    verbose = OPTIONS.verbose
   proc = Run(args, verbose=verbose, **kwargs)
   output, _ = proc.communicate()
   if output is None:
@@ -1503,12 +1505,14 @@
 
   custom_partitions = OPTIONS.info_dict.get(
       "avb_custom_images_partition_list", "").strip().split()
+  custom_avb_partitions = [
+      "vbmeta_" + part for part in OPTIONS.info_dict.get(
+          "avb_custom_vbmeta_images_partition_list", "").strip().split()]
 
   for partition, path in partitions.items():
     if partition not in needed_partitions:
       continue
     assert (partition in AVB_PARTITIONS or
             partition in AVB_VBMETA_PARTITIONS or
+            partition in custom_avb_partitions or
             partition in custom_partitions), \
         'Unknown partition: {}'.format(partition)
     assert os.path.exists(path), \
@@ -1543,14 +1547,20 @@
 
 
 def _MakeRamdisk(sourcedir, fs_config_file=None,
+                 dev_node_file=None,
                  ramdisk_format=RamdiskFormat.GZ):
   ramdisk_img = tempfile.NamedTemporaryFile()
 
-  if fs_config_file is not None and os.access(fs_config_file, os.F_OK):
-    cmd = ["mkbootfs", "-f", fs_config_file,
-           os.path.join(sourcedir, "RAMDISK")]
-  else:
-    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
+  cmd = ["mkbootfs"]
+
+  if fs_config_file and os.access(fs_config_file, os.F_OK):
+    cmd.extend(["-f", fs_config_file])
+
+  if dev_node_file and os.access(dev_node_file, os.F_OK):
+    cmd.extend(["-n", dev_node_file])
+
+  cmd.append(os.path.join(sourcedir, "RAMDISK"))
+
   p1 = Run(cmd, stdout=subprocess.PIPE)
   if ramdisk_format == RamdiskFormat.LZ4:
     p2 = Run(["lz4", "-l", "-12", "--favor-decSpeed"], stdin=p1.stdout,
@@ -1568,7 +1578,8 @@
   return ramdisk_img
 
 
-def _BuildBootableImage(image_name, sourcedir, fs_config_file, info_dict=None,
+def _BuildBootableImage(image_name, sourcedir, fs_config_file,
+                        dev_node_file=None, info_dict=None,
                         has_ramdisk=False, two_step_image=False):
   """Build a bootable image from the specified sourcedir.
 
@@ -1610,7 +1621,7 @@
 
   if has_ramdisk:
     ramdisk_format = GetRamdiskFormat(info_dict)
-    ramdisk_img = _MakeRamdisk(sourcedir, fs_config_file,
+    ramdisk_img = _MakeRamdisk(sourcedir, fs_config_file, dev_node_file,
                                ramdisk_format=ramdisk_format)
 
   # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
@@ -1818,7 +1829,8 @@
 
 
 def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
-                     info_dict=None, two_step_image=False):
+                     info_dict=None, two_step_image=False,
+                     dev_nodes=False):
   """Return a File object with the desired bootable image.
 
   Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name 'prebuilt_name',
@@ -1854,6 +1866,8 @@
   fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
   data = _BuildBootableImage(prebuilt_name, os.path.join(unpack_dir, tree_subdir),
                              os.path.join(unpack_dir, fs_config),
+                             os.path.join(unpack_dir, 'META/ramdisk_node_list')
+                                if dev_nodes else None,
                              info_dict, has_ramdisk, two_step_image)
   if data:
     return File(name, data)
@@ -2107,9 +2121,7 @@
   if info_dict is None:
     info_dict = LoadInfoDict(input_zip)
 
-  is_sparse = info_dict.get("extfs_sparse_flag")
-  if info_dict.get(which + "_disable_sparse"):
-    is_sparse = False
+  is_sparse = IsSparseImage(os.path.join(tmpdir, "IMAGES", which + ".img"))
 
   # When target uses 'BOARD_EXT4_SHARE_DUP_BLOCKS := true', images may contain
   # shared blocks (i.e. some blocks will show up in multiple files' block
@@ -2908,13 +2920,12 @@
 
     fd, new_zipfile = tempfile.mkstemp(dir=os.path.dirname(zip_filename))
     os.close(fd)
+    cmd = ["zip2zip", "-i", zip_filename, "-o", new_zipfile]
+    for entry in entries:
+      cmd.append("-x")
+      cmd.append(entry)
+    RunAndCheckOutput(cmd)
 
-    with zipfile.ZipFile(new_zipfile, 'w') as zout:
-      for item in zin.infolist():
-        if item.filename in entries:
-          continue
-        buffer = zin.read(item.filename)
-        zout.writestr(item, buffer)
 
   os.replace(new_zipfile, zip_filename)
 
@@ -3623,11 +3634,13 @@
 
   else:
     system_root_image = info_dict.get("system_root_image") == "true"
+    include_recovery_dtbo = info_dict.get("include_recovery_dtbo") == "true"
+    include_recovery_acpio = info_dict.get("include_recovery_acpio") == "true"
     path = os.path.join(input_dir, recovery_resource_dat_path)
     # With system-root-image, boot and recovery images will have mismatching
     # entries (only recovery has the ramdisk entry) (Bug: 72731506). Use bsdiff
     # to handle such a case.
-    if system_root_image:
+    if system_root_image or include_recovery_dtbo or include_recovery_acpio:
       diff_program = ["bsdiff"]
       bonus_args = ""
       assert not os.path.exists(path)
@@ -4017,3 +4030,26 @@
     # Magic for android sparse image format
     # https://source.android.com/devices/bootloader/images
     return fp.read(4) == b'\x3A\xFF\x26\xED'
+
+def ParseUpdateEngineConfig(path: str):
+  """Parse the update_engine config stored in file `path`
+
+  Args:
+    path: Path to the update_engine_config.txt file in target_files.
+
+  Returns:
+    A tuple of (major, minor) version numbers, e.g. (2, 8).
+  """
+  with open(path, "r") as fp:
+    # update_engine_config.txt is only supposed to contain two lines,
+    # PAYLOAD_MAJOR_VERSION and PAYLOAD_MINOR_VERSION. 1024 should be more than
+    # sufficient. If the length is more than that, something is wrong.
+    data = fp.read(1024)
+    major = re.search(r"PAYLOAD_MAJOR_VERSION=(\d+)", data)
+    if not major:
+      raise ValueError(
+          f"{path} is an invalid update_engine config, missing PAYLOAD_MAJOR_VERSION {data}")
+    minor = re.search(r"PAYLOAD_MINOR_VERSION=(\d+)", data)
+    if not minor:
+      raise ValueError(
+          f"{path} is an invalid update_engine config, missing PAYLOAD_MINOR_VERSION {data}")
+    return (int(major.group(1)), int(minor.group(1)))
\ No newline at end of file
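
A short usage sketch of the new ParseUpdateEngineConfig helper. The file contents mirror the (2, 8) example from its docstring, the temporary path is illustrative, and the import assumes tools/releasetools is on PYTHONPATH:
```
import tempfile

from common import ParseUpdateEngineConfig

with tempfile.NamedTemporaryFile(mode="w", suffix=".txt", delete=False) as fp:
  fp.write("PAYLOAD_MAJOR_VERSION=2\nPAYLOAD_MINOR_VERSION=8\n")

assert ParseUpdateEngineConfig(fp.name) == (2, 8)
# The returned tuples compare element-wise, which is what merge_meta.py's
# MergeUpdateEngineConfig relies on to copy the older of the two configs.
```
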
diff --git a/tools/releasetools/create_brick_ota.py b/tools/releasetools/create_brick_ota.py
new file mode 100644
index 0000000..44f0a95
--- /dev/null
+++ b/tools/releasetools/create_brick_ota.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+from pathlib import Path
+import zipfile
+from typing import List
+import common
+import tempfile
+import shutil
+
+PARTITIONS_TO_WIPE = ["/dev/block/by-name/vbmeta",
+                      "/dev/block/by-name/vbmeta_a",
+                      "/dev/block/by-name/vbmeta_b",
+                      "/dev/block/by-name/vbmeta_system_a",
+                      "/dev/block/by-name/vbmeta_system_b",
+                      "/dev/block/by-name/boot",
+                      "/dev/block/by-name/boot_a",
+                      "/dev/block/by-name/boot_b",
+                      "/dev/block/by-name/vendor_boot",
+                      "/dev/block/by-name/vendor_boot_a",
+                      "/dev/block/by-name/vendor_boot_b",
+                      "/dev/block/by-name/init_boot_a",
+                      "/dev/block/by-name/init_boot_b",
+                      "/dev/block/by-name/metadata",
+                      "/dev/block/by-name/super",
+                      "/dev/block/by-name/userdata"]
+
+
+def CreateBrickOta(product_name: str, output_path: Path, extra_wipe_partitions: str, serialno: str):
+  partitions_to_wipe = PARTITIONS_TO_WIPE
+  if extra_wipe_partitions is not None:
+    partitions_to_wipe = PARTITIONS_TO_WIPE + extra_wipe_partitions.split(",")
+  # recovery requires the product name to be a "|"-separated list
+  product_name = product_name.replace(",", "|")
+  with zipfile.ZipFile(output_path, "w") as zfp:
+    zfp.writestr("recovery.wipe", "\n".join(partitions_to_wipe))
+    zfp.writestr("payload.bin", "")
+    zfp.writestr("META-INF/com/android/metadata", "\n".join(
+        ["ota-type=BRICK", "post-timestamp=9999999999", "pre-device=" + product_name, "serialno=" + serialno]))
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(description='Android Brick OTA generator')
+  parser.add_argument('otafile', metavar='PAYLOAD', type=str,
+                      help='The output OTA package file.')
+  parser.add_argument('--product', type=str,
+                      help='The product name of the device, for example, bramble, redfin. This can be a comma separated list.', required=True)
+  parser.add_argument('--serialno', type=str,
+                      help='The serial number of devices that are allowed to install this OTA package. This can be a comma separated list.')
+  parser.add_argument('--extra_wipe_partitions', type=str,
+                      help='Additional partitions on device which should be wiped.')
+  parser.add_argument('-v', action="store_true",
+                      help="Enable verbose logging", dest="verbose")
+  parser.add_argument('--package_key', type=str,
+                      help='Paths to private key for signing payload')
+  parser.add_argument('--search_path', type=str,
+                      help='Search path for framework/signapk.jar')
+  parser.add_argument('--private_key_suffix', type=str,
+                      help='Suffix to be appended to package_key path', default=".pk8")
+  args = parser.parse_args(argv[1:])
+  if args.search_path:
+    common.OPTIONS.search_path = args.search_path
+  if args.verbose:
+    common.OPTIONS.verbose = args.verbose
+  CreateBrickOta(args.product, args.otafile,
+                 args.extra_wipe_partitions, args.serialno)
+  if args.package_key:
+    common.OPTIONS.private_key_suffix = args.private_key_suffix
+    with tempfile.NamedTemporaryFile() as tmpfile:
+      common.SignFile(args.otafile, tmpfile.name,
+                      args.package_key, None, whole_file=True)
+      shutil.copy(tmpfile.name, args.otafile)
+
+
+if __name__ == "__main__":
+  import sys
+  main(sys.argv)
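
A quick sketch of the package CreateBrickOta writes; the output path, product list, serial number and extra partition below are made up for illustration:
```
import zipfile

from create_brick_ota import CreateBrickOta

CreateBrickOta("bramble,redfin", "brick-ota.zip",
               extra_wipe_partitions="/dev/block/by-name/oem",
               serialno="0A1B2C3D")

with zipfile.ZipFile("brick-ota.zip") as zfp:
  # The package carries the wipe list, an empty payload and the OTA metadata.
  print(zfp.namelist())
  # ['recovery.wipe', 'payload.bin', 'META-INF/com/android/metadata']
  print(zfp.read("META-INF/com/android/metadata").decode())
  # ota-type=BRICK
  # post-timestamp=9999999999
  # pre-device=bramble|redfin
  # serialno=0A1B2C3D
```
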
diff --git a/tools/releasetools/merge/merge_dexopt.py b/tools/releasetools/merge/merge_dexopt.py
index 7bf9bd4..16182b5 100644
--- a/tools/releasetools/merge/merge_dexopt.py
+++ b/tools/releasetools/merge/merge_dexopt.py
@@ -164,6 +164,10 @@
           'deapexer',
           '--debugfs_path',
           'debugfs_static',
+          '--blkid_path',
+          'blkid',
+          '--fsckerofs_path',
+          'fsck.erofs',
           'extract',
           apex,
           apex_extract_dir,
diff --git a/tools/releasetools/merge/merge_meta.py b/tools/releasetools/merge/merge_meta.py
index 580b3ce..b61f039 100644
--- a/tools/releasetools/merge/merge_meta.py
+++ b/tools/releasetools/merge/merge_meta.py
@@ -29,6 +29,7 @@
 import merge_utils
 import sparse_img
 import verity_utils
+from ota_utils import ParseUpdateEngineConfig
 
 from common import ExternalError
 
@@ -52,20 +53,41 @@
 MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')
 
 
+def MergeUpdateEngineConfig(input_metadir1, input_metadir2, merged_meta_dir):
+  UPDATE_ENGINE_CONFIG_NAME = "update_engine_config.txt"
+  config1_path = os.path.join(
+      input_metadir1, UPDATE_ENGINE_CONFIG_NAME)
+  config2_path = os.path.join(
+      input_metadir2, UPDATE_ENGINE_CONFIG_NAME)
+  config1 = ParseUpdateEngineConfig(config1_path)
+  config2 = ParseUpdateEngineConfig(config2_path)
+  # Copy the older config to the merged target files for maximum
+  # compatibility: update_engine in the system partition comes from the
+  # system side, but update_engine_sideload in recovery comes from the
+  # vendor side.
+  if config1 < config2:
+    shutil.copy(config1_path, os.path.join(
+        merged_meta_dir, UPDATE_ENGINE_CONFIG_NAME))
+  else:
+    shutil.copy(config2_path, os.path.join(
+        merged_meta_dir, UPDATE_ENGINE_CONFIG_NAME))
+
+
 def MergeMetaFiles(temp_dir, merged_dir):
   """Merges various files in META/*."""
 
   framework_meta_dir = os.path.join(temp_dir, 'framework_meta', 'META')
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.framework_target_files,
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.framework_target_files,
       output_dir=os.path.dirname(framework_meta_dir),
-      extract_item_list=('META/*',))
+      item_list=('META/*',))
 
   vendor_meta_dir = os.path.join(temp_dir, 'vendor_meta', 'META')
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.vendor_target_files,
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.vendor_target_files,
       output_dir=os.path.dirname(vendor_meta_dir),
-      extract_item_list=('META/*',))
+      item_list=('META/*',))
 
   merged_meta_dir = os.path.join(merged_dir, 'META')
 
@@ -102,6 +124,11 @@
         merged_meta_dir=merged_meta_dir,
         file_name=file_name)
 
+  MergeUpdateEngineConfig(
+      framework_meta_dir,
+      vendor_meta_dir, merged_meta_dir,
+  )
+
   # Write the now-finalized OPTIONS.merged_misc_info.
   merge_utils.WriteSortedData(
       data=OPTIONS.merged_misc_info,
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index c95cead..ba2b14f 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -26,9 +26,9 @@
 
 Usage: merge_target_files [args]
 
-  --framework-target-files framework-target-files-zip-archive
+  --framework-target-files framework-target-files-package
       The input target files package containing framework bits. This is a zip
-      archive.
+      archive or a directory.
 
   --framework-item-list framework-item-list-file
       The optional path to a newline-separated config file of items that
@@ -38,9 +38,9 @@
       The optional path to a newline-separated config file of keys to
       extract from the framework META/misc_info.txt file.
 
-  --vendor-target-files vendor-target-files-zip-archive
+  --vendor-target-files vendor-target-files-package
       The input target files package containing vendor bits. This is a zip
-      archive.
+      archive or a directory.
 
   --vendor-item-list vendor-item-list-file
       The optional path to a newline-separated config file of items that
@@ -156,6 +156,28 @@
     shutil.move(source, destination)
 
 
+def remove_file_if_exists(file_name):
+  """Remove the file if it exists and skip otherwise."""
+
+  try:
+    os.remove(file_name)
+  except FileNotFoundError:
+    pass
+
+
+def include_meta_in_list(item_list):
+  """Include all `META/*` files in the item list.
+
+  To ensure that `AddImagesToTargetFiles` can still be used with vendor item
+  lists that do not specify all of the required META/ files, those files should
+  be included by default. This preserves the backward compatibility of
+  `rebuild_image_with_sepolicy`.
+  """
+  if not item_list:
+    return None
+  return list(item_list) + ['META/*']
+
+
 def create_merged_package(temp_dir):
   """Merges two target files packages into one target files structure.
 
@@ -163,18 +185,18 @@
     Path to merged package under temp directory.
   """
   # Extract "as is" items from the input framework and vendor partial target
-  # files packages directly into the output temporary directory, since these items
-  # do not need special case processing.
+  # files packages directly into the output temporary directory, since these
+  # items do not need special case processing.
 
   output_target_files_temp_dir = os.path.join(temp_dir, 'output')
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.framework_target_files,
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.framework_target_files,
       output_dir=output_target_files_temp_dir,
-      extract_item_list=OPTIONS.framework_item_list)
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.vendor_target_files,
+      item_list=OPTIONS.framework_item_list)
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.vendor_target_files,
       output_dir=output_target_files_temp_dir,
-      extract_item_list=OPTIONS.vendor_item_list)
+      item_list=OPTIONS.vendor_item_list)
 
   # Perform special case processing on META/* items.
   # After this function completes successfully, all the files we need to create
@@ -210,8 +232,7 @@
   If odm is present then odm is preferred -- otherwise vendor is used.
   """
   partition = 'vendor'
-  if os.path.exists(os.path.join(target_files_dir, 'ODM')) or os.path.exists(
-      os.path.join(target_files_dir, 'IMAGES/odm.img')):
+  if os.path.exists(os.path.join(target_files_dir, 'ODM')):
     partition = 'odm'
   partition_img = '{}.img'.format(partition)
   partition_map = '{}.map'.format(partition)
@@ -223,7 +244,8 @@
   def copy_selinux_file(input_path, output_filename):
     input_filename = os.path.join(target_files_dir, input_path)
     if not os.path.exists(input_filename):
-      input_filename = input_filename.replace('SYSTEM_EXT/', 'SYSTEM/system_ext/') \
+      input_filename = input_filename.replace('SYSTEM_EXT/',
+                                              'SYSTEM/system_ext/') \
           .replace('PRODUCT/', 'SYSTEM/product/')
       if not os.path.exists(input_filename):
         logger.info('Skipping copy_selinux_file for %s', input_filename)
@@ -245,7 +267,8 @@
   if not OPTIONS.vendor_otatools:
     # Remove the partition from the merged target-files archive. It will be
     # rebuilt later automatically by generate_missing_images().
-    os.remove(os.path.join(target_files_dir, 'IMAGES', partition_img))
+    remove_file_if_exists(
+        os.path.join(target_files_dir, 'IMAGES', partition_img))
     return
 
   # TODO(b/192253131): Remove the need for vendor_otatools by fixing
@@ -263,7 +286,10 @@
   vendor_target_files_dir = common.MakeTempDir(
       prefix='merge_target_files_vendor_target_files_')
   common.UnzipToDir(OPTIONS.vendor_otatools, vendor_otatools_dir)
-  common.UnzipToDir(OPTIONS.vendor_target_files, vendor_target_files_dir)
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.vendor_target_files,
+      output_dir=vendor_target_files_dir,
+      item_list=include_meta_in_list(OPTIONS.vendor_item_list))
 
   # Copy the partition contents from the merged target-files archive to the
   # vendor target-files archive.
@@ -274,7 +300,8 @@
       symlinks=True)
 
   # Delete then rebuild the partition.
-  os.remove(os.path.join(vendor_target_files_dir, 'IMAGES', partition_img))
+  remove_file_if_exists(
+      os.path.join(vendor_target_files_dir, 'IMAGES', partition_img))
   rebuild_partition_command = [
       os.path.join(vendor_otatools_dir, 'bin', 'add_img_to_target_files'),
       '--verbose',
@@ -293,8 +320,9 @@
   shutil.move(
       os.path.join(vendor_target_files_dir, 'IMAGES', partition_img),
       os.path.join(target_files_dir, 'IMAGES', partition_img))
-  move_only_exists(os.path.join(vendor_target_files_dir, 'IMAGES', partition_map),
-        os.path.join(target_files_dir, 'IMAGES', partition_map))
+  move_only_exists(
+      os.path.join(vendor_target_files_dir, 'IMAGES', partition_map),
+      os.path.join(target_files_dir, 'IMAGES', partition_map))
 
   def copy_recovery_file(filename):
     for subdir in ('VENDOR', 'SYSTEM/vendor'):
@@ -568,10 +596,10 @@
     common.Usage(__doc__)
     sys.exit(1)
 
-  with zipfile.ZipFile(OPTIONS.framework_target_files, allowZip64=True) as fz:
-    framework_namelist = fz.namelist()
-  with zipfile.ZipFile(OPTIONS.vendor_target_files, allowZip64=True) as vz:
-    vendor_namelist = vz.namelist()
+  framework_namelist = merge_utils.GetTargetFilesItems(
+      OPTIONS.framework_target_files)
+  vendor_namelist = merge_utils.GetTargetFilesItems(
+      OPTIONS.vendor_target_files)
 
   if OPTIONS.framework_item_list:
     OPTIONS.framework_item_list = common.LoadListFromFile(
diff --git a/tools/releasetools/merge/merge_utils.py b/tools/releasetools/merge/merge_utils.py
index e253b02..c284338 100644
--- a/tools/releasetools/merge/merge_utils.py
+++ b/tools/releasetools/merge/merge_utils.py
@@ -49,28 +49,80 @@
   common.UnzipToDir(input_zip, output_dir, filtered_extract_item_list)
 
 
-def CopyItems(from_dir, to_dir, patterns):
-  """Similar to ExtractItems() except uses an input dir instead of zip."""
-  file_paths = []
-  for dirpath, _, filenames in os.walk(from_dir):
-    file_paths.extend(
-        os.path.relpath(path=os.path.join(dirpath, filename), start=from_dir)
-        for filename in filenames)
+def CopyItems(from_dir, to_dir, copy_item_list):
+  """Copies the items in copy_item_list from source to destination directory.
 
-  filtered_file_paths = set()
-  for pattern in patterns:
-    filtered_file_paths.update(fnmatch.filter(file_paths, pattern))
+  copy_item_list may include files and directories. Will copy the matched
+  files and create the matched directories.
 
-  for file_path in filtered_file_paths:
-    original_file_path = os.path.join(from_dir, file_path)
-    copied_file_path = os.path.join(to_dir, file_path)
-    copied_file_dir = os.path.dirname(copied_file_path)
-    if not os.path.exists(copied_file_dir):
-      os.makedirs(copied_file_dir)
-    if os.path.islink(original_file_path):
-      os.symlink(os.readlink(original_file_path), copied_file_path)
+  Args:
+    from_dir: The source directory.
+    to_dir: The destination directory.
+    copy_item_list: Items to be copied.
+  """
+  item_paths = []
+  for root, dirs, files in os.walk(from_dir):
+    item_paths.extend(
+        os.path.relpath(path=os.path.join(root, item_name), start=from_dir)
+        for item_name in files + dirs)
+
+  filtered = set()
+  for pattern in copy_item_list:
+    filtered.update(fnmatch.filter(item_paths, pattern))
+
+  for item in filtered:
+    original_path = os.path.join(from_dir, item)
+    copied_path = os.path.join(to_dir, item)
+    copied_parent_path = os.path.dirname(copied_path)
+    if not os.path.exists(copied_parent_path):
+      os.makedirs(copied_parent_path)
+    if os.path.islink(original_path):
+      os.symlink(os.readlink(original_path), copied_path)
+    elif os.path.isdir(original_path):
+      if not os.path.exists(copied_path):
+        os.makedirs(copied_path)
     else:
-      shutil.copyfile(original_file_path, copied_file_path)
+      shutil.copyfile(original_path, copied_path)
+
+
+def GetTargetFilesItems(target_files_zipfile_or_dir):
+  """Gets a list of target files items."""
+  if zipfile.is_zipfile(target_files_zipfile_or_dir):
+    with zipfile.ZipFile(target_files_zipfile_or_dir, allowZip64=True) as fz:
+      return fz.namelist()
+  elif os.path.isdir(target_files_zipfile_or_dir):
+    item_list = []
+    for root, dirs, files in os.walk(target_files_zipfile_or_dir):
+      item_list.extend(
+          os.path.relpath(path=os.path.join(root, item),
+                          start=target_files_zipfile_or_dir)
+          for item in dirs + files)
+    return item_list
+  else:
+    raise ValueError('Target files should be either zipfile or directory.')
+
+
+def CollectTargetFiles(input_zipfile_or_dir, output_dir, item_list=None):
+  """Extracts input zipfile or copy input directory to output directory.
+
+  Extracts the input zipfile if `input_zipfile_or_dir` is a zip archive, or
+  copies the items if `input_zipfile_or_dir` is a directory.
+
+  Args:
+    input_zipfile_or_dir: The input target files, could be either a zipfile to
+      extract or a directory to copy.
+    output_dir: The output directory to which the input files are either
+      extracted or copied.
+    item_list: Files to be extracted or copied. Will extract or copy all files
+      if omitted.
+  """
+  patterns = item_list if item_list else ('*',)
+  if zipfile.is_zipfile(input_zipfile_or_dir):
+    ExtractItems(input_zipfile_or_dir, output_dir, patterns)
+  elif os.path.isdir(input_zipfile_or_dir):
+    CopyItems(input_zipfile_or_dir, output_dir, patterns)
+  else:
+    raise ValueError('Target files should be either zipfile or directory.')
 
 
 def WriteSortedData(data, path):
@@ -178,7 +230,6 @@
     item_set.update([
         'META/liblz4.so',
         'META/postinstall_config.txt',
-        'META/update_engine_config.txt',
         'META/zucchini_config.txt',
     ])
   else:  # vendor
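
The new CollectTargetFiles/GetTargetFilesItems helpers accept either a target-files zip or an already extracted directory. A small usage sketch with illustrative paths:
```
import merge_utils

# Same call whether the framework input is a zip archive or a directory.
merge_utils.CollectTargetFiles(
    input_zipfile_or_dir="framework-target_files.zip",
    output_dir="/tmp/merged/framework_meta",
    item_list=("META/*",))

# Returns entry names (files and directories) relative to the package root.
names = merge_utils.GetTargetFilesItems("/tmp/vendor_target_files_dir")
print(len(names))
```
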
diff --git a/tools/releasetools/merge/test_merge_utils.py b/tools/releasetools/merge/test_merge_utils.py
index eceb734..b4c47ae 100644
--- a/tools/releasetools/merge/test_merge_utils.py
+++ b/tools/releasetools/merge/test_merge_utils.py
@@ -35,22 +35,27 @@
       open(path, 'a').close()
       return path
 
+    def createEmptyFolder(path):
+      os.makedirs(path)
+      return path
+
     def createSymLink(source, dest):
       os.symlink(source, dest)
       return dest
 
     def getRelPaths(start, filepaths):
       return set(
-          os.path.relpath(path=filepath, start=start) for filepath in filepaths)
+          os.path.relpath(path=filepath, start=start)
+          for filepath in filepaths)
 
     input_dir = common.MakeTempDir()
     output_dir = common.MakeTempDir()
     expected_copied_items = []
     actual_copied_items = []
-    patterns = ['*.cpp', 'subdir/*.txt']
+    patterns = ['*.cpp', 'subdir/*.txt', 'subdir/empty_dir']
 
-    # Create various files that we expect to get copied because they
-    # match one of the patterns.
+    # Create various files and empty directories that we expect to get copied
+    # because they match one of the patterns.
     expected_copied_items.extend([
         createEmptyFile(os.path.join(input_dir, 'a.cpp')),
         createEmptyFile(os.path.join(input_dir, 'b.cpp')),
@@ -58,6 +63,7 @@
         createEmptyFile(os.path.join(input_dir, 'subdir', 'd.txt')),
         createEmptyFile(
             os.path.join(input_dir, 'subdir', 'subsubdir', 'e.txt')),
+        createEmptyFolder(os.path.join(input_dir, 'subdir', 'empty_dir')),
         createSymLink('a.cpp', os.path.join(input_dir, 'a_link.cpp')),
     ])
     # Create some more files that we expect to not get copied.
@@ -70,9 +76,13 @@
     merge_utils.CopyItems(input_dir, output_dir, patterns)
 
     # Assert the actual copied items match the ones we expected.
-    for dirpath, _, filenames in os.walk(output_dir):
+    for root_dir, dirs, files in os.walk(output_dir):
       actual_copied_items.extend(
-          os.path.join(dirpath, filename) for filename in filenames)
+          os.path.join(root_dir, filename) for filename in files)
+      for dirname in dirs:
+        dir_path = os.path.join(root_dir, dirname)
+        if not os.listdir(dir_path):
+          actual_copied_items.append(dir_path)
     self.assertEqual(
         getRelPaths(output_dir, actual_copied_items),
         getRelPaths(input_dir, expected_copied_items))
@@ -142,7 +152,6 @@
         'META/liblz4.so',
         'META/postinstall_config.txt',
         'META/product_filesystem_config.txt',
-        'META/update_engine_config.txt',
         'META/zucchini_config.txt',
         'PRODUCT/*',
         'SYSTEM/*',
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 6c927ec..c4fd809 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -48,17 +48,12 @@
     # if the filesystem is ext4.
     partition_source_info = source_info["fstab"]["/" + name]
     check_first_block = partition_source_info.fs_type == "ext4"
-    # Disable using imgdiff for squashfs. 'imgdiff -z' expects input files to be
-    # in zip formats. However with squashfs, a) all files are compressed in LZ4;
-    # b) the blocks listed in block map may not contain all the bytes for a
-    # given file (because they're rounded to be 4K-aligned).
-    partition_target_info = target_info["fstab"]["/" + name]
-    disable_imgdiff = (partition_source_info.fs_type == "squashfs" or
-                       partition_target_info.fs_type == "squashfs")
+    # Disable imgdiff because it relies on zlib to produce stable output
+    # across different versions, which is often not the case.
     return common.BlockDifference(name, partition_tgt, partition_src,
                                   check_first_block,
                                   version=blockimgdiff_version,
-                                  disable_imgdiff=disable_imgdiff)
+                                  disable_imgdiff=True)
 
   if source_zip:
     # See notes in common.GetUserImage()
@@ -282,7 +277,7 @@
   needed_property_files = (
       NonAbOtaPropertyFiles(),
   )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
+  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files, package_key=OPTIONS.package_key)
 
 
 def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
@@ -409,7 +404,7 @@
   if updating_boot:
     boot_type, boot_device_expr = common.GetTypeAndDeviceExpr("/boot",
                                                               source_info)
-    d = common.Difference(target_boot, source_boot)
+    d = common.Difference(target_boot, source_boot, "bsdiff")
     _, _, d = d.ComputePatch()
     if d is None:
       include_full_boot = True
@@ -537,7 +532,7 @@
   needed_property_files = (
       NonAbOtaPropertyFiles(),
   )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
+  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files, package_key=OPTIONS.package_key)
 
 
 def GenerateNonAbOtaPackage(target_file, output_file, source_file=None):
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 9d5c67d..7307ec2 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -205,7 +205,8 @@
 
   --partial "<PARTITION> [<PARTITION>[...]]"
       Generate partial updates, overriding ab_partitions list with the given
-      list.
+      list. Pass --partial= with an empty value to let the tooling auto-detect
+      the partial partition list from the input target files.
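+      For example, passing --partial "system product" (a hypothetical list)
+      would update only the system and product partitions.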
 
   --custom_image <custom_partition=custom_image>
       Use the specified custom_image to update custom_partition when generating
@@ -422,6 +423,13 @@
   slot will be used. This is to ensure that we always have valid boot, vbmeta,
   bootloader images in the inactive slot.
 
+  After writing system_other to the inactive slot's system partition,
+  PackageManagerService will read `ro.cp_system_other_odex`, and set
+  `sys.cppreopt` to "requested". Then, according to
+  system/extras/cppreopts/cppreopts.rc, init will mount system_other at
+  /postinstall, and execute `cppreopts` to copy optimized APKs from
+  /postinstall to /data.
+
   Args:
     input_file: The input target-files.zip file.
     skip_postinstall: Whether to skip copying the postinstall config file.
@@ -726,30 +734,34 @@
 
   Returns:
     The filename of a target-files.zip which has renamed the custom images in
-    the IMAGS/ to their partition names.
+    the IMAGES/ to their partition names.
   """
-  # Use zip2zip to avoid extracting the zipfile.
+
+  # First pass: use zip2zip to copy the target files contents, excluding
+  # the "custom" images that will be replaced.
   target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
   cmd = ['zip2zip', '-i', input_file, '-o', target_file]
 
-  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
-    namelist = input_zip.namelist()
-
-  # Write {custom_image}.img as {custom_partition}.img.
+  images = {}
   for custom_partition, custom_image in custom_images.items():
     default_custom_image = '{}.img'.format(custom_partition)
     if default_custom_image != custom_image:
-      logger.info("Update custom partition '%s' with '%s'",
-                  custom_partition, custom_image)
-      # Default custom image need to be deleted first.
-      namelist.remove('IMAGES/{}'.format(default_custom_image))
-      # IMAGES/{custom_image}.img:IMAGES/{custom_partition}.img.
-      cmd.extend(['IMAGES/{}:IMAGES/{}'.format(custom_image,
-                                               default_custom_image)])
+      src = 'IMAGES/' + custom_image
+      dst = 'IMAGES/' + default_custom_image
+      cmd.extend(['-x', dst])
+      images[dst] = src
 
-  cmd.extend(['{}:{}'.format(name, name) for name in namelist])
   common.RunAndCheckOutput(cmd)
 
+  # Second pass: write {custom_image}.img as {custom_partition}.img.
+  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
+    with zipfile.ZipFile(target_file, 'a', allowZip64=True) as output_zip:
+      for dst, src in images.items():
+        data = input_zip.read(src)
+        logger.info("Update custom partition '%s'", dst)
+        common.ZipWriteStr(output_zip, dst, data)
+      output_zip.close()
+
   return target_file
 
 
@@ -856,6 +868,31 @@
     target_info = common.BuildInfo(OPTIONS.info_dict, OPTIONS.oem_dicts)
     source_info = None
 
+  if OPTIONS.partial == []:
+    logger.info(
+        "Automatically detecting partial partition list from input target files.")
+    OPTIONS.partial = target_info.get(
+        "partial_ota_update_partitions_list").split()
+    assert OPTIONS.partial, (
+        "Input target_file does not have partial_ota_update_partitions_list "
+        "defined, failed to auto detect partial partition list. Please specify "
+        "the list of partitions to update manually via --partial=a,b,c , or "
+        "generate a complete OTA by removing the --partial option")
+    OPTIONS.partial.sort()
+    if source_info:
+      source_partial_list = source_info.get(
+          "partial_ota_update_partitions_list").split()
+      if source_partial_list:
+        source_partial_list.sort()
+        if source_partial_list != OPTIONS.partial:
+          logger.warning("Source build and target build have different partial partition lists. Source: %s, target: %s, taking the intersection.",
+                         source_partial_list, OPTIONS.partial)
+          OPTIONS.partial = list(
+              set(OPTIONS.partial) & set(source_partial_list))
+          OPTIONS.partial.sort()
+    logger.info("Automatically deduced partial partition list: %s",
+                OPTIONS.partial)
+
   if target_info.vendor_suppressed_vabc:
     logger.info("Vendor suppressed VABC. Disabling")
     OPTIONS.disable_vabc = True
@@ -867,6 +904,11 @@
           (source_info is not None and not source_info.is_vabc_xor):
     logger.info("VABC XOR Not supported, disabling")
     OPTIONS.enable_vabc_xor = False
+
+  if OPTIONS.vabc_compression_param == "none":
+    logger.info(
+        "VABC Compression algorithm is set to 'none', disabling VABC xor")
+    OPTIONS.enable_vabc_xor = False
   additional_args = []
 
   # Prepare custom images.
@@ -881,7 +923,6 @@
   elif OPTIONS.partial:
     target_file = GetTargetFilesZipForPartialUpdates(target_file,
                                                      OPTIONS.partial)
-    additional_args += ["--is_partial_update", "true"]
   elif OPTIONS.vabc_compression_param:
     target_file = GetTargetFilesZipForCustomVABCCompression(
         target_file, OPTIONS.vabc_compression_param)
@@ -897,7 +938,8 @@
   # Metadata to comply with Android OTA package format.
   metadata = GetPackageMetadata(target_info, source_info)
   # Generate payload.
-  payload = PayloadGenerator(OPTIONS.include_secondary, OPTIONS.wipe_user_data)
+  payload = PayloadGenerator(
+      wipe_user_data=OPTIONS.wipe_user_data, minor_version=OPTIONS.force_minor_version, is_partial_update=OPTIONS.partial)
 
   partition_timestamps_flags = []
   # Enforce a max timestamp this payload can be applied on top of.
@@ -924,7 +966,7 @@
 
   additional_args += ["--security_patch_level", security_patch_level]
 
-  additional_args += ["--enable_zucchini",
+  additional_args += ["--enable_zucchini=" +
                       str(OPTIONS.enable_zucchini).lower()]
 
   if not ota_utils.IsLz4diffCompatible(source_file, target_file):
@@ -932,7 +974,7 @@
         "Source build doesn't support lz4diff, or source/target don't have compatible lz4diff versions. Disabling lz4diff.")
     OPTIONS.enable_lz4diff = False
 
-  additional_args += ["--enable_lz4diff",
+  additional_args += ["--enable_lz4diff=" +
                       str(OPTIONS.enable_lz4diff).lower()]
 
   if source_file and OPTIONS.enable_lz4diff:
@@ -948,20 +990,13 @@
     additional_args += ["--erofs_compression_param", erofs_compression_param]
 
   if OPTIONS.disable_vabc:
-    additional_args += ["--disable_vabc", "true"]
+    additional_args += ["--disable_vabc=true"]
   if OPTIONS.enable_vabc_xor:
-    additional_args += ["--enable_vabc_xor", "true"]
-  if OPTIONS.force_minor_version:
-    additional_args += ["--force_minor_version", OPTIONS.force_minor_version]
+    additional_args += ["--enable_vabc_xor=true"]
   if OPTIONS.compressor_types:
     additional_args += ["--compressor_types", OPTIONS.compressor_types]
   additional_args += ["--max_timestamp", max_timestamp]
 
-  if SupportsMainlineGkiUpdates(source_file):
-    logger.warning(
-        "Detected build with mainline GKI, include full boot image.")
-    additional_args.extend(["--full_boot", "true"])
-
   payload.Generate(
       target_file,
       source_file,
@@ -1107,9 +1142,12 @@
     elif o == "--boot_variable_file":
       OPTIONS.boot_variable_file = a
     elif o == "--partial":
-      partitions = a.split()
-      if not partitions:
-        raise ValueError("Cannot parse partitions in {}".format(a))
+      if a:
+        partitions = a.split()
+        if not partitions:
+          raise ValueError("Cannot parse partitions in {}".format(a))
+      else:
+        partitions = []
       OPTIONS.partial = partitions
     elif o == "--custom_image":
       custom_partition, custom_image = a.split("=")
@@ -1190,13 +1228,12 @@
                                  "vabc_compression_param=",
                                  "security_patch_level=",
                              ], extra_option_handler=option_handler)
+  common.InitLogging()
 
   if len(args) != 2:
     common.Usage(__doc__)
     sys.exit(1)
 
-  common.InitLogging()
-
   # Load the build info dicts from the zip directly or the extracted input
   # directory. We don't need to unzip the entire target-files zips, because they
   # won't be needed for A/B OTAs (brillo_update_payload does that on its own).
@@ -1313,6 +1350,15 @@
     source_spl = source_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
     target_spl = target_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
     is_spl_downgrade = target_spl < source_spl
+    if is_spl_downgrade and target_build_prop.GetProp("ro.build.tags") == "release-keys":
+      raise common.ExternalError(
+          "Target security patch level {} is older than source SPL {} "
+          "A locked bootloader will reject SPL downgrade no matter "
+          "what(even if data wipe is done), so SPL downgrade on any "
+          "release-keys build is not allowed.".format(target_spl, source_spl))
+
+    logger.info("SPL downgrade on %s",
+                target_build_prop.GetProp("ro.build.tags"))
     if is_spl_downgrade and not OPTIONS.spl_downgrade and not OPTIONS.downgrade:
       raise common.ExternalError(
           "Target security patch level {} is older than source SPL {} applying "
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 9f41874..985aeda 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -25,7 +25,7 @@
 from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
                     ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
                     SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
-                    GetRamdiskFormat)
+                    GetRamdiskFormat, ParseUpdateEngineConfig)
 from payload_signer import PayloadSigner
 
 
@@ -132,8 +132,11 @@
 
   # Re-sign the package after updating the metadata entry.
   if no_signing:
+    logger.info(f"Signing disabled for output file {output_file}")
     shutil.copy(prelim_signing, output_file)
   else:
+    logger.info(
+        f"Signing the output file {output_file} with key {package_key}")
     SignOutput(prelim_signing, output_file, package_key, pw)
 
   # Reopen the final signed zip to double check the streaming metadata.
@@ -719,6 +722,45 @@
   return sourceEntry and targetEntry and sourceEntry == targetEntry
 
 
+def ExtractTargetFiles(path: str):
+  if os.path.isdir(path):
+    logger.info("target files %s is already extracted", path)
+    return path
+  extracted_dir = common.MakeTempDir("target_files")
+  common.UnzipToDir(path, extracted_dir, UNZIP_PATTERN)
+  return extracted_dir
+
+
+def LocatePartitionPath(target_files_dir: str, partition: str, allow_empty):
+  path = os.path.join(target_files_dir, "RADIO", partition + ".img")
+  if os.path.exists(path):
+    return path
+  path = os.path.join(target_files_dir, "IMAGES", partition + ".img")
+  if os.path.exists(path):
+    return path
+  if allow_empty:
+    return ""
+  raise common.ExternalError(
+      "Partition {} not found in target files {}".format(partition, target_files_dir))
+
+
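+# Join per-partition image paths with ':' in the same order as ab_partitions,
+# matching the format delta_generator expects for its --new_partitions and
+# --old_partitions flags.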
+def GetPartitionImages(target_files_dir: str, ab_partitions, allow_empty=True):
+  assert os.path.isdir(target_files_dir)
+  return ":".join([LocatePartitionPath(target_files_dir, partition, allow_empty) for partition in ab_partitions])
+
+
+def LocatePartitionMap(target_files_dir: str, partition: str):
+  path = os.path.join(target_files_dir, "RADIO", partition + ".map")
+  if os.path.exists(path):
+    return path
+  return ""
+
+
+def GetPartitionMaps(target_files_dir: str, ab_partitions):
+  assert os.path.isdir(target_files_dir)
+  return ":".join([LocatePartitionMap(target_files_dir, partition) for partition in ab_partitions])
+
+
 class PayloadGenerator(object):
   """Manages the creation and the signing of an A/B OTA Payload."""
 
@@ -727,7 +769,7 @@
   SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
   SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
 
-  def __init__(self, secondary=False, wipe_user_data=False):
+  def __init__(self, secondary=False, wipe_user_data=False, minor_version=None, is_partial_update=False):
     """Initializes a Payload instance.
 
     Args:
@@ -737,6 +779,8 @@
     self.payload_properties = None
     self.secondary = secondary
     self.wipe_user_data = wipe_user_data
+    self.minor_version = minor_version
+    self.is_partial_update = is_partial_update
 
   def _Run(self, cmd):  # pylint: disable=no-self-use
     # Don't pipe (buffer) the output if verbose is set. Let
@@ -755,21 +799,56 @@
       source_file: The filename of the source build target-files zip; or None if
           generating a full OTA.
       additional_args: A list of additional args that should be passed to
-          brillo_update_payload script; or None.
+          delta_generator binary; or None.
     """
     if additional_args is None:
       additional_args = []
 
     payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
-    cmd = ["brillo_update_payload", "generate",
-           "--payload", payload_file,
-           "--target_image", target_file]
+    target_dir = ExtractTargetFiles(target_file)
+    cmd = ["delta_generator",
+           "--out_file", payload_file]
+    with open(os.path.join(target_dir, "META", "ab_partitions.txt")) as fp:
+      ab_partitions = fp.read().strip().split("\n")
+    cmd.extend(["--partition_names", ":".join(ab_partitions)])
+    cmd.extend(
+        ["--new_partitions", GetPartitionImages(target_dir, ab_partitions, False)])
+    cmd.extend(
+        ["--new_mapfiles", GetPartitionMaps(target_dir, ab_partitions)])
     if source_file is not None:
-      cmd.extend(["--source_image", source_file])
+      source_dir = ExtractTargetFiles(source_file)
+      cmd.extend(
+          ["--old_partitions", GetPartitionImages(source_dir, ab_partitions, True)])
+      cmd.extend(
+          ["--old_mapfiles", GetPartitionMaps(source_dir, ab_partitions)])
+
       if OPTIONS.disable_fec_computation:
-        cmd.extend(["--disable_fec_computation", "true"])
+        cmd.extend(["--disable_fec_computation=true"])
       if OPTIONS.disable_verity_computation:
-        cmd.extend(["--disable_verity_computation", "true"])
+        cmd.extend(["--disable_verity_computation=true"])
+    postinstall_config = os.path.join(
+        target_dir, "META", "postinstall_config.txt")
+
+    if os.path.exists(postinstall_config):
+      cmd.extend(["--new_postinstall_config_file", postinstall_config])
+    dynamic_partition_info = os.path.join(
+        target_dir, "META", "dynamic_partitions_info.txt")
+
+    if os.path.exists(dynamic_partition_info):
+      cmd.extend(["--dynamic_partition_info_file", dynamic_partition_info])
+
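+    # Use the target build's update_engine_config.txt for a full OTA; for an
+    # incremental OTA the source build's config takes precedence, and
+    # minor_version is only passed for incremental or partial updates.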
+    major_version, minor_version = ParseUpdateEngineConfig(
+        os.path.join(target_dir, "META", "update_engine_config.txt"))
+    if source_file:
+      major_version, minor_version = ParseUpdateEngineConfig(
+          os.path.join(source_dir, "META", "update_engine_config.txt"))
+    if self.minor_version:
+      minor_version = self.minor_version
+    cmd.extend(["--major_version", str(major_version)])
+    if source_file is not None or self.is_partial_update:
+      cmd.extend(["--minor_version", str(minor_version)])
+    if self.is_partial_update:
+      cmd.extend(["--is_partial_update=true"])
     cmd.extend(additional_args)
     self._Run(cmd)
 
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index d3fbdad..8291448 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -99,14 +99,14 @@
       The second dir will be used for lookup if BOARD_USES_RECOVERY_AS_BOOT is
       set to true.
 
-  --avb_{boot,recovery,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
-         vbmeta_vendor}_algorithm <algorithm>
-  --avb_{boot,recovery,system,system_other,vendor,dtbo,vbmeta,vbmeta_system,
-         vbmeta_vendor}_key <key>
+  --avb_{boot,init_boot,recovery,system,system_other,vendor,dtbo,vbmeta,
+         vbmeta_system,vbmeta_vendor}_algorithm <algorithm>
+  --avb_{boot,init_boot,recovery,system,system_other,vendor,dtbo,vbmeta,
+         vbmeta_system,vbmeta_vendor}_key <key>
       Use the specified algorithm (e.g. SHA256_RSA4096) and the key to AVB-sign
       the specified image. Otherwise it uses the existing values in info dict.
 
-  --avb_{apex,boot,recovery,system,system_other,vendor,dtbo,vbmeta,
+  --avb_{apex,init_boot,boot,recovery,system,system_other,vendor,dtbo,vbmeta,
          vbmeta_system,vbmeta_vendor}_extra_args <args>
       Specify any additional args that are needed to AVB-sign the image
       (e.g. "--signing_helper /path/to/helper"). The args will be appended to
@@ -531,7 +531,14 @@
       # RECOVERY/RAMDISK/default.prop is a legacy path, but will always exist
       # as a symlink in the current code. So it's a no-op here. Keeping the
       # path here for clarity.
-      "RECOVERY/RAMDISK/default.prop") or filename.endswith("build.prop")
+      # Some build props might be stored under path
+      # VENDOR_BOOT/RAMDISK_FRAGMENTS/recovery/RAMDISK/default.prop, and
+      # default.prop can be a symbolic link to prop.default, so overwrite all
+      # files that end with build.prop, default.prop or prop.default.
+      "RECOVERY/RAMDISK/default.prop") or \
+        filename.endswith("build.prop") or \
+        filename.endswith("/default.prop") or \
+        filename.endswith("/prop.default")
 
 
 def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
@@ -1363,6 +1370,12 @@
       OPTIONS.avb_algorithms['dtbo'] = a
     elif o == "--avb_dtbo_extra_args":
       OPTIONS.avb_extra_args['dtbo'] = a
+    elif o == "--avb_init_boot_key":
+      OPTIONS.avb_keys['init_boot'] = a
+    elif o == "--avb_init_boot_algorithm":
+      OPTIONS.avb_algorithms['init_boot'] = a
+    elif o == "--avb_init_boot_extra_args":
+      OPTIONS.avb_extra_args['init_boot'] = a
     elif o == "--avb_recovery_key":
       OPTIONS.avb_keys['recovery'] = a
     elif o == "--avb_recovery_algorithm":
@@ -1458,6 +1471,9 @@
           "avb_dtbo_algorithm=",
           "avb_dtbo_key=",
           "avb_dtbo_extra_args=",
+          "avb_init_boot_algorithm=",
+          "avb_init_boot_key=",
+          "avb_init_boot_extra_args=",
           "avb_recovery_algorithm=",
           "avb_recovery_key=",
           "avb_recovery_extra_args=",
diff --git a/tools/releasetools/test_check_target_files_vintf.py b/tools/releasetools/test_check_target_files_vintf.py
index 8725dd6..7c154d7 100644
--- a/tools/releasetools/test_check_target_files_vintf.py
+++ b/tools/releasetools/test_check_target_files_vintf.py
@@ -15,6 +15,7 @@
 #
 
 import os.path
+import shutil
 
 import common
 import test_utils
@@ -86,6 +87,28 @@
 
     return test_dir
 
+  # Prepare test dir with required HAL for APEX testing
+  def prepare_apex_test_dir(self, test_delta_rel_path):
+    test_dir = self.prepare_test_dir(test_delta_rel_path)
+    write_string_to_file(
+        """<compatibility-matrix version="1.0" level="1" type="framework">
+            <hal format="aidl" optional="false" updatable-via-apex="true">
+                <name>android.apex.foo</name>
+                <version>1</version>
+                <interface>
+                    <name>IApex</name>
+                    <instance>default</instance>
+                </interface>
+            </hal>
+            <sepolicy>
+                <sepolicy-version>0.0</sepolicy-version>
+                <kernel-sepolicy-version>0</kernel-sepolicy-version>
+            </sepolicy>
+        </compatibility-matrix>""",
+        os.path.join(test_dir, 'SYSTEM/etc/vintf/compatibility_matrix.1.xml'))
+
+    return test_dir
+
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_CheckVintf_skeleton(self):
     msg = 'vintf check with skeleton target files failed.'
@@ -143,3 +166,25 @@
                          os.path.join(test_dir, 'VENDOR/etc/vintf/manifest.xml'))
     # Should raise an error because a file has invalid format.
     self.assertRaises(common.ExternalError, CheckVintf, test_dir)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_CheckVintf_apex_compat(self):
+    apex_file_name = 'com.android.apex.vendor.foo.with_vintf.apex'
+    msg = 'vintf/apex_compat should be compatible because ' \
+          'APEX %s has the required HALs' % (apex_file_name)
+    test_dir = self.prepare_apex_test_dir('vintf/apex_compat')
+    # Copy APEX under VENDOR/apex
+    apex_file = os.path.join(test_utils.get_current_dir(), apex_file_name)
+    apex_dir = os.path.join(test_dir, 'VENDOR/apex')
+    os.makedirs(apex_dir)
+    shutil.copy(apex_file, apex_dir)
+    # Should find required HAL via APEX
+    self.assertTrue(CheckVintf(test_dir), msg=msg)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_CheckVintf_apex_incompat(self):
+    msg = 'vintf/apex_incompat should be incompatible because ' \
+          'no APEX data'
+    test_dir = self.prepare_apex_test_dir('vintf/apex_incompat')
+    # Should not find required HAL
+    self.assertFalse(CheckVintf(test_dir), msg=msg)
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 2a0e592..2dfd8c7 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -452,12 +452,14 @@
         test_file.write(bytes(data))
       test_file.close()
 
-      expected_stat = os.stat(test_file_name)
       expected_mode = extra_zipwrite_args.get("perms", 0o644)
       expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                        zipfile.ZIP_STORED)
-      time.sleep(5)  # Make sure the atime/mtime will change measurably.
 
+      # Arbitrary timestamp, just to make sure common.ZipWrite() restores
+      # the timestamp after writing.
+      os.utime(test_file_name, (1234567, 1234567))
+      expected_stat = os.stat(test_file_name)
       common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
       common.ZipClose(zip_file)
 
@@ -480,8 +482,6 @@
     try:
       expected_compress_type = extra_args.get("compress_type",
                                               zipfile.ZIP_STORED)
-      time.sleep(5)  # Make sure the atime/mtime will change measurably.
-
       if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
         arcname = zinfo_or_arcname
         expected_mode = extra_args.get("perms", 0o644)
@@ -528,11 +528,13 @@
         test_file.write(data)
       test_file.close()
 
+      # Arbitrary timestamp, just to make sure common.ZipWrite() restores
+      # the timestamp after writing.
+      os.utime(test_file_name, (1234567, 1234567))
       expected_stat = os.stat(test_file_name)
       expected_mode = 0o644
       expected_compress_type = extra_args.get("compress_type",
                                               zipfile.ZIP_STORED)
-      time.sleep(5)  # Make sure the atime/mtime will change measurably.
 
       common.ZipWrite(zip_file, test_file_name, **extra_args)
       common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp
new file mode 100644
index 0000000..f6c0190
--- /dev/null
+++ b/tools/sbom/Android.bp
@@ -0,0 +1,53 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+python_binary_host {
+    name: "generate-sbom",
+    srcs: [
+        "generate-sbom.py",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    libs: [
+        "metadata_file_proto_py",
+        "libprotobuf-python",
+        "sbom_lib",
+    ],
+}
+
+python_library_host {
+    name: "sbom_lib",
+    srcs: [
+        "sbom_data.py",
+        "sbom_writers.py",
+    ],
+}
+
+python_test_host {
+    name: "sbom_writers_test",
+    main: "sbom_writers_test.py",
+    srcs: [
+        "sbom_writers_test.py",
+    ],
+    data: [
+        "testdata/*",
+    ],
+    libs: [
+        "sbom_lib",
+    ],
+    test_suites: ["general-tests"],
+}
diff --git a/tools/sbom/generate-sbom.py b/tools/sbom/generate-sbom.py
new file mode 100755
index 0000000..0c5deb2
--- /dev/null
+++ b/tools/sbom/generate-sbom.py
@@ -0,0 +1,536 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generate the SBOM of the current target product in SPDX format.
+Usage example:
+  generate-sbom.py --output_file out/target/product/vsoc_x86_64/sbom.spdx \
+                   --metadata out/target/product/vsoc_x86_64/sbom-metadata.csv \
+                   --product_out_dir=out/target/product/vsoc_x86_64 \
+                   --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
+                   --product_mfr=Google
+"""
+
+import argparse
+import csv
+import datetime
+import google.protobuf.text_format as text_format
+import hashlib
+import os
+import metadata_file_pb2
+import sbom_data
+import sbom_writers
+
+
+# Package type
+PKG_SOURCE = 'SOURCE'
+PKG_UPSTREAM = 'UPSTREAM'
+PKG_PREBUILT = 'PREBUILT'
+
+# Security tag
+NVD_CPE23 = 'NVD-CPE2.3:'
+
+# Report
+ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
+ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
+ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
+ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
+ISSUE_INSTALLED_FILE_NOT_EXIST = 'Installed files that do not exist:'
+INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
+
+
+def get_args():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
+  parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
+  parser.add_argument('--metadata', required=True, help='The SBOM metadata file path.')
+  parser.add_argument('--product_out_dir', required=True, help='The parent directory of all the installed files.')
+  parser.add_argument('--build_version', required=True, help='The build version.')
+  parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
+  parser.add_argument('--json', action='store_true', default=False, help='Generated SBOM file in SPDX JSON format')
+  parser.add_argument('--unbundled', action='store_true', default=False, help='Generate SBOM file for unbundled module')
+
+  return parser.parse_args()
+
+
+def log(*info):
+  if args.verbose:
+    for i in info:
+      print(i)
+
+
+def encode_for_spdxid(s):
+  """Simple encode for string values used in SPDXID which uses the charset of A-Za-Z0-9.-"""
+  result = ''
+  for c in s:
+    if c.isalnum() or c in '.-':
+      result += c
+    elif c in '_@/':
+      result += '-'
+    else:
+      result += '0x' + c.encode('utf-8').hex()
+
+  return result.lstrip('-')
+
+
+def new_package_id(package_name, type):
+  return f'SPDXRef-{type}-{encode_for_spdxid(package_name)}'
+
+
+def new_file_id(file_path):
+  return f'SPDXRef-{encode_for_spdxid(file_path)}'
+
+
+def checksum(file_path):
+  file_path = args.product_out_dir + '/' + file_path
+  h = hashlib.sha1()
+  if os.path.islink(file_path):
+    h.update(os.readlink(file_path).encode('utf-8'))
+  else:
+    with open(file_path, 'rb') as f:
+      h.update(f.read())
+  return f'SHA1: {h.hexdigest()}'
+
+
+def is_soong_prebuilt_module(file_metadata):
+  return file_metadata['soong_module_type'] and file_metadata['soong_module_type'] in [
+      'android_app_import', 'android_library_import', 'cc_prebuilt_binary', 'cc_prebuilt_library',
+      'cc_prebuilt_library_headers', 'cc_prebuilt_library_shared', 'cc_prebuilt_library_static', 'cc_prebuilt_object',
+      'dex_import', 'java_import', 'java_sdk_library_import', 'java_system_modules_import',
+      'libclang_rt_prebuilt_library_static', 'libclang_rt_prebuilt_library_shared', 'llvm_prebuilt_library_static',
+      'ndk_prebuilt_object', 'ndk_prebuilt_shared_stl', 'ndk_prebuilt_static_stl', 'prebuilt_apex',
+      'prebuilt_bootclasspath_fragment', 'prebuilt_dsp', 'prebuilt_firmware', 'prebuilt_kernel_modules',
+      'prebuilt_rfsa', 'prebuilt_root', 'rust_prebuilt_dylib', 'rust_prebuilt_library', 'rust_prebuilt_rlib',
+      'vndk_prebuilt_shared',
+
+      # 'android_test_import',
+      # 'cc_prebuilt_test_library_shared',
+      # 'java_import_host',
+      # 'java_test_import',
+      # 'llvm_host_prebuilt_library_shared',
+      # 'prebuilt_apis',
+      # 'prebuilt_build_tool',
+      # 'prebuilt_defaults',
+      # 'prebuilt_etc',
+      # 'prebuilt_etc_host',
+      # 'prebuilt_etc_xml',
+      # 'prebuilt_font',
+      # 'prebuilt_hidl_interfaces',
+      # 'prebuilt_platform_compat_config',
+      # 'prebuilt_stubs_sources',
+      # 'prebuilt_usr_share',
+      # 'prebuilt_usr_share_host',
+      # 'soong_config_module_type_import',
+  ]
+
+
+def is_source_package(file_metadata):
+  module_path = file_metadata['module_path']
+  return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)
+
+
+def is_prebuilt_package(file_metadata):
+  module_path = file_metadata['module_path']
+  if module_path:
+    return (module_path.startswith('prebuilts/') or
+            is_soong_prebuilt_module(file_metadata) or
+            file_metadata['is_prebuilt_make_module'])
+
+  kernel_module_copy_files = file_metadata['kernel_module_copy_files']
+  if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
+    return True
+
+  return False
+
+
+def get_source_package_info(file_metadata, metadata_file_path):
+  """Return source package info exists in its METADATA file, currently including name, security tag
+  and external SBOM reference.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  if not metadata_file_path:
+    return file_metadata['module_path'], []
+
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  external_refs = []
+  for tag in metadata_proto.third_party.security.tag:
+    if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
+      external_refs.append(
+        sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+                                     type=sbom_data.PackageExternalRefType.cpe23Type,
+                                     locator=tag.removeprefix(NVD_CPE23)))
+    elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
+      external_refs.append(
+        sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+                                     type=sbom_data.PackageExternalRefType.cpe22Type,
+                                     locator=tag.removeprefix(NVD_CPE23)))
+
+  if metadata_proto.name:
+    return metadata_proto.name, external_refs
+  else:
+    return os.path.basename(metadata_file_path), external_refs  # return the directory name only as package name
+
+
+def get_prebuilt_package_name(file_metadata, metadata_file_path):
+  """Return name of a prebuilt package, which can be from the METADATA file, metadata file path,
+  module path or kernel module's source path if the installed file is a kernel module.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  name = None
+  if metadata_file_path:
+    metadata_proto = metadata_file_protos[metadata_file_path]
+    if metadata_proto.name:
+      name = metadata_proto.name
+    else:
+      name = metadata_file_path
+  elif file_metadata['module_path']:
+    name = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
+    name = os.path.dirname(src_path)
+
+  return name.removeprefix('prebuilts/').replace('/', '-')
+
+
+def get_metadata_file_path(file_metadata):
+  """Search for METADATA file of a package and return its path."""
+  metadata_path = ''
+  if file_metadata['module_path']:
+    metadata_path = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])
+
+  while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
+    metadata_path = os.path.dirname(metadata_path)
+
+  return metadata_path
+
+
+def get_package_version(metadata_file_path):
+  """Return a package's version in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  return metadata_proto.third_party.version
+
+
+def get_package_homepage(metadata_file_path):
+  """Return a package's homepage URL in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.homepage:
+    return metadata_proto.third_party.homepage
+  for url in metadata_proto.third_party.url:
+    if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
+      return url.value
+
+  return None
+
+
+def get_package_download_location(metadata_file_path):
+  """Return a package's code repository URL in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.url:
+    urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
+    if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
+      return urls[0].value
+    elif len(urls) > 1:
+      return urls[1].value
+
+  return None
+
+
+def get_sbom_fragments(installed_file_metadata, metadata_file_path):
+  """Return SPDX fragment of source/prebuilt packages, which usually contains a SOURCE/PREBUILT
+  package, a UPSTREAM package if it's a source package and a external SBOM document reference if
+  it's a prebuilt package with sbom_ref defined in its METADATA file.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  external_doc_ref = None
+  packages = []
+  relationships = []
+
+  # Info from METADATA file
+  homepage = get_package_homepage(metadata_file_path)
+  version = get_package_version(metadata_file_path)
+  download_location = get_package_download_location(metadata_file_path)
+
+  if is_source_package(installed_file_metadata):
+    # Source fork packages
+    name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
+    source_package_id = new_package_id(name, PKG_SOURCE)
+    source_package = sbom_data.Package(id=source_package_id, name=name, version=args.build_version,
+                                       supplier='Organization: ' + args.product_mfr,
+                                       external_refs=external_refs)
+
+    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+    upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
+                                         supplier='Organization: ' + homepage if homepage else None,
+                                         download_location=download_location)
+    packages += [source_package, upstream_package]
+    relationships.append(sbom_data.Relationship(id1=source_package_id,
+                                                relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                id2=upstream_package_id))
+  elif is_prebuilt_package(installed_file_metadata):
+    # Prebuilt fork packages
+    name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
+    prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
+    prebuilt_package = sbom_data.Package(id=prebuilt_package_id,
+                                         name=name,
+                                         version=args.build_version,
+                                         supplier='Organization: ' + args.product_mfr)
+    packages.append(prebuilt_package)
+
+    if metadata_file_path:
+      metadata_proto = metadata_file_protos[metadata_file_path]
+      if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
+        sbom_url = metadata_proto.third_party.sbom_ref.url
+        sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
+        upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
+        if sbom_url and sbom_checksum and upstream_element_id:
+          doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{encode_for_spdxid(name)}'
+          external_doc_ref = sbom_data.DocumentExternalReference(id=doc_ref_id,
+                                                                 uri=sbom_url,
+                                                                 checksum=sbom_checksum)
+          relationships.append(
+            sbom_data.Relationship(id1=prebuilt_package_id,
+                                   relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                   id2=doc_ref_id + ':' + upstream_element_id))
+
+  return external_doc_ref, packages, relationships
+
+
+def generate_package_verification_code(files):
+  checksums = [file.checksum for file in files]
+  checksums.sort()
+  h = hashlib.sha1()
+  h.update(''.join(checksums).encode(encoding='utf-8'))
+  return h.hexdigest()
+
+
+def save_report(report):
+  prefix, _ = os.path.splitext(args.output_file)
+  with open(prefix + '-gen-report.txt', 'w', encoding='utf-8') as report_file:
+    for type, issues in report.items():
+      report_file.write(type + '\n')
+      for issue in issues:
+        report_file.write('\t' + issue + '\n')
+      report_file.write('\n')
+
+
+# Validate the metadata generated by Make for installed files and report if there is no metadata.
+def installed_file_has_metadata(installed_file_metadata, report):
+  installed_file = installed_file_metadata['installed_file']
+  module_path = installed_file_metadata['module_path']
+  product_copy_files = installed_file_metadata['product_copy_files']
+  kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+  is_platform_generated = installed_file_metadata['is_platform_generated']
+
+  if (not module_path and
+      not product_copy_files and
+      not kernel_module_copy_files and
+      not is_platform_generated and
+      not installed_file.endswith('.fsv_meta')):
+    report[ISSUE_NO_METADATA].append(installed_file)
+    return False
+
+  return True
+
+
+def report_metadata_file(metadata_file_path, installed_file_metadata, report):
+  if metadata_file_path:
+    report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
+        'installed_file: {}, module_path: {}, METADATA file: {}'.format(
+            installed_file_metadata['installed_file'],
+            installed_file_metadata['module_path'],
+            metadata_file_path + '/METADATA'))
+
+    package_metadata = metadata_file_pb2.Metadata()
+    with open(metadata_file_path + '/METADATA', 'rt') as f:
+      text_format.Parse(f.read(), package_metadata)
+
+    if not metadata_file_path in metadata_file_protos:
+      metadata_file_protos[metadata_file_path] = package_metadata
+      if not package_metadata.name:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(f'{metadata_file_path}/METADATA does not have "name"')
+
+      if not package_metadata.third_party.version:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(
+            f'{metadata_file_path}/METADATA does not have "third_party.version"')
+
+      for tag in package_metadata.third_party.security.tag:
+        if not tag.startswith(NVD_CPE23):
+          report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
+              f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
+  else:
+    report[ISSUE_NO_METADATA_FILE].append(
+        "installed_file: {}, module_path: {}".format(
+            installed_file_metadata['installed_file'], installed_file_metadata['module_path']))
+
+
+def generate_sbom_for_unbundled():
+  with open(args.metadata, newline='') as sbom_metadata_file:
+    reader = csv.DictReader(sbom_metadata_file)
+    doc = sbom_data.Document(name=args.build_version,
+                             namespace=f'https://www.google.com/sbom/spdx/android/{args.build_version}',
+                             creators=['Organization: ' + args.product_mfr])
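+    # Find the single row in sbom-metadata.csv whose installed file corresponds
+    # to the requested output file (<installed file>.spdx) and describe that file.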
+    for installed_file_metadata in reader:
+      installed_file = installed_file_metadata['installed_file']
+      if args.output_file != args.product_out_dir + installed_file + ".spdx":
+        continue
+
+      module_path = installed_file_metadata['module_path']
+      package_id = new_package_id(module_path, PKG_PREBUILT)
+      package = sbom_data.Package(id=package_id,
+                                  name=module_path,
+                                  version=args.build_version,
+                                  supplier='Organization: ' + args.product_mfr)
+      file_id = new_file_id(installed_file)
+      file = sbom_data.File(id=file_id, name=installed_file, checksum=checksum(installed_file))
+      relationship = sbom_data.Relationship(id1=file_id,
+                                            relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                            id2=package_id)
+      doc.add_package(package)
+      doc.files.append(file)
+      doc.describes = file_id
+      doc.add_relationship(relationship)
+      doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+      break
+
+  with open(args.output_file, 'w', encoding="utf-8") as file:
+    sbom_writers.TagValueWriter.write(doc, file, fragment=True)
+
+
+def main():
+  global args
+  args = get_args()
+  log('Args:', vars(args))
+
+  if args.unbundled:
+    generate_sbom_for_unbundled()
+    return
+
+  global metadata_file_protos
+  metadata_file_protos = {}
+
+  doc = sbom_data.Document(name=args.build_version,
+                           namespace=f'https://www.google.com/sbom/spdx/android/{args.build_version}',
+                           creators=['Organization: ' + args.product_mfr])
+
+  product_package = sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
+                                      name=sbom_data.PACKAGE_NAME_PRODUCT,
+                                      version=args.build_version,
+                                      supplier='Organization: ' + args.product_mfr,
+                                      files_analyzed=True)
+  doc.packages.append(product_package)
+
+  doc.packages.append(sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
+                                        name=sbom_data.PACKAGE_NAME_PLATFORM,
+                                        version=args.build_version,
+                                        supplier='Organization: ' + args.product_mfr))
+
+  # Report on some issues and information
+  report = {
+    ISSUE_NO_METADATA: [],
+    ISSUE_NO_METADATA_FILE: [],
+    ISSUE_METADATA_FILE_INCOMPLETE: [],
+    ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
+    ISSUE_INSTALLED_FILE_NOT_EXIST: [],
+    INFO_METADATA_FOUND_FOR_PACKAGE: [],
+  }
+
+  # Scan the metadata in CSV file and create the corresponding package and file records in SPDX
+  with open(args.metadata, newline='') as sbom_metadata_file:
+    reader = csv.DictReader(sbom_metadata_file)
+    for installed_file_metadata in reader:
+      installed_file = installed_file_metadata['installed_file']
+      module_path = installed_file_metadata['module_path']
+      product_copy_files = installed_file_metadata['product_copy_files']
+      kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+
+      if not installed_file_has_metadata(installed_file_metadata, report):
+        continue
+      file_path = args.product_out_dir + '/' + installed_file
+      if not (os.path.islink(file_path) or os.path.isfile(file_path)):
+        report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
+        continue
+
+      file_id = new_file_id(installed_file)
+      doc.files.append(
+        sbom_data.File(id=file_id, name=installed_file, checksum=checksum(installed_file)))
+      product_package.file_ids.append(file_id)
+
+      if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
+        metadata_file_path = get_metadata_file_path(installed_file_metadata)
+        report_metadata_file(metadata_file_path, installed_file_metadata, report)
+
+        # File from source fork packages or prebuilt fork packages
+        external_doc_ref, pkgs, rels = get_sbom_fragments(installed_file_metadata, metadata_file_path)
+        if len(pkgs) > 0:
+          if external_doc_ref:
+            doc.add_external_ref(external_doc_ref)
+          for p in pkgs:
+            doc.add_package(p)
+          for rel in rels:
+            doc.add_relationship(rel)
+          fork_package_id = pkgs[0].id  # The first package should be the source/prebuilt fork package
+          doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                      relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                      id2=fork_package_id))
+      elif module_path or installed_file_metadata['is_platform_generated']:
+        # File from PLATFORM package
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+      elif product_copy_files:
+        # Format of product_copy_files: <source path>:<dest path>
+        src_path = product_copy_files.split(':')[0]
+        # So far product_copy_files are copied from the system, kernel, hardware, frameworks and
+        # device directories, so process them as files from the PLATFORM package.
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+      elif installed_file.endswith('.fsv_meta'):
+        # See build/make/core/Makefile:2988
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+      elif kernel_module_copy_files.startswith('ANDROID-GEN'):
+        # For the four files generated for _dlkm, _ramdisk partitions
+        # See build/make/core/Makefile:323
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+
+  product_package.verification_code = generate_package_verification_code(doc.files)
+
+  # Save SBOM records to output file
+  doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+  with open(args.output_file, 'w', encoding="utf-8") as file:
+    sbom_writers.TagValueWriter.write(doc, file)
+  if args.json:
+    with open(args.output_file+'.json', 'w', encoding="utf-8") as file:
+      sbom_writers.JSONWriter.write(doc, file)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/tools/sbom/sbom_data.py b/tools/sbom/sbom_data.py
new file mode 100644
index 0000000..0c380f6
--- /dev/null
+++ b/tools/sbom/sbom_data.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Define data classes that model SBOMs defined by SPDX. The data classes could be
+written out to different formats (tagvalue, JSON, etc) of SPDX with corresponding
+writer utilities.
+
+Refer to the SPDX 2.3 spec: https://spdx.github.io/spdx-spec/v2.3/ and go/android-spdx for details
+of the fields in each data class.
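+
+Minimal usage sketch (illustrative):
+  doc = Document(name='<build fingerprint>',
+                 namespace='https://www.google.com/sbom/spdx/android/<build fingerprint>',
+                 creators=['Organization: <manufacturer>'])
+  doc.add_package(Package(id='SPDXRef-PREBUILT-foo', name='foo'))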
+"""
+
+from dataclasses import dataclass, field
+from typing import List
+
+SPDXID_DOC = 'SPDXRef-DOCUMENT'
+SPDXID_PRODUCT = 'SPDXRef-PRODUCT'
+SPDXID_PLATFORM = 'SPDXRef-PLATFORM'
+
+PACKAGE_NAME_PRODUCT = 'PRODUCT'
+PACKAGE_NAME_PLATFORM = 'PLATFORM'
+
+
+class PackageExternalRefCategory:
+  SECURITY = 'SECURITY'
+  PACKAGE_MANAGER = 'PACKAGE-MANAGER'
+  PERSISTENT_ID = 'PERSISTENT-ID'
+  OTHER = 'OTHER'
+
+
+class PackageExternalRefType:
+  cpe22Type = 'cpe22Type'
+  cpe23Type = 'cpe23Type'
+
+
+@dataclass
+class PackageExternalRef:
+  category: PackageExternalRefCategory
+  type: PackageExternalRefType
+  locator: str
+
+
+@dataclass
+class Package:
+  name: str
+  id: str
+  version: str = None
+  supplier: str = None
+  download_location: str = None
+  files_analyzed: bool = False
+  verification_code: str = None
+  file_ids: List[str] = field(default_factory=list)
+  external_refs: List[PackageExternalRef] = field(default_factory=list)
+
+
+@dataclass
+class File:
+  id: str
+  name: str
+  checksum: str
+
+
+class RelationshipType:
+  DESCRIBES = 'DESCRIBES'
+  VARIANT_OF = 'VARIANT_OF'
+  GENERATED_FROM = 'GENERATED_FROM'
+
+
+@dataclass
+class Relationship:
+  id1: str
+  relationship: RelationshipType
+  id2: str
+
+
+@dataclass
+class DocumentExternalReference:
+  id: str
+  uri: str
+  checksum: str
+
+
+@dataclass
+class Document:
+  name: str
+  namespace: str
+  id: str = SPDXID_DOC
+  describes: str = SPDXID_PRODUCT
+  creators: List[str] = field(default_factory=list)
+  created: str = None
+  external_refs: List[DocumentExternalReference] = field(default_factory=list)
+  packages: List[Package] = field(default_factory=list)
+  files: List[File] = field(default_factory=list)
+  relationships: List[Relationship] = field(default_factory=list)
+
+  def add_external_ref(self, external_ref):
+    if not any(external_ref.uri == ref.uri for ref in self.external_refs):
+      self.external_refs.append(external_ref)
+
+  def add_package(self, package):
+    if not any(package.id == p.id for p in self.packages):
+      self.packages.append(package)
+
+  def add_relationship(self, rel):
+    if not any(rel.id1 == r.id1 and rel.id2 == r.id2 and rel.relationship == r.relationship
+               for r in self.relationships):
+      self.relationships.append(rel)
diff --git a/tools/sbom/sbom_writers.py b/tools/sbom/sbom_writers.py
new file mode 100644
index 0000000..66aa6b4
--- /dev/null
+++ b/tools/sbom/sbom_writers.py
@@ -0,0 +1,365 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Serialize objects defined in package sbom_data to SPDX format: tagvalue, JSON.
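+
+Tag-value output begins with document headers like (illustrative):
+  SPDXVersion: SPDX-2.3
+  DataLicense: CC0-1.0
+  SPDXID: SPDXRef-DOCUMENT
+  DocumentName: <build fingerprint>
+  DocumentNamespace: <document namespace URI>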
+"""
+
+import json
+import sbom_data
+
+SPDX_VER = 'SPDX-2.3'
+DATA_LIC = 'CC0-1.0'
+
+
+class Tags:
+  # Common
+  SPDXID = 'SPDXID'
+  SPDX_VERSION = 'SPDXVersion'
+  DATA_LICENSE = 'DataLicense'
+  DOCUMENT_NAME = 'DocumentName'
+  DOCUMENT_NAMESPACE = 'DocumentNamespace'
+  CREATED = 'Created'
+  CREATOR = 'Creator'
+  EXTERNAL_DOCUMENT_REF = 'ExternalDocumentRef'
+
+  # Package
+  PACKAGE_NAME = 'PackageName'
+  PACKAGE_DOWNLOAD_LOCATION = 'PackageDownloadLocation'
+  PACKAGE_VERSION = 'PackageVersion'
+  PACKAGE_SUPPLIER = 'PackageSupplier'
+  FILES_ANALYZED = 'FilesAnalyzed'
+  PACKAGE_VERIFICATION_CODE = 'PackageVerificationCode'
+  PACKAGE_EXTERNAL_REF = 'ExternalRef'
+  # Package license
+  PACKAGE_LICENSE_CONCLUDED = 'PackageLicenseConcluded'
+  PACKAGE_LICENSE_INFO_FROM_FILES = 'PackageLicenseInfoFromFiles'
+  PACKAGE_LICENSE_DECLARED = 'PackageLicenseDeclared'
+  PACKAGE_LICENSE_COMMENTS = 'PackageLicenseComments'
+
+  # File
+  FILE_NAME = 'FileName'
+  FILE_CHECKSUM = 'FileChecksum'
+  # File license
+  FILE_LICENSE_CONCLUDED = 'LicenseConcluded'
+  FILE_LICENSE_INFO_IN_FILE = 'LicenseInfoInFile'
+  FILE_LICENSE_COMMENTS = 'LicenseComments'
+  FILE_COPYRIGHT_TEXT = 'FileCopyrightText'
+  FILE_NOTICE = 'FileNotice'
+  FILE_ATTRIBUTION_TEXT = 'FileAttributionText'
+
+  # Relationship
+  RELATIONSHIP = 'Relationship'
+
+
+class TagValueWriter:
+  @staticmethod
+  def marshal_doc_headers(sbom_doc):
+    headers = [
+      f'{Tags.SPDX_VERSION}: {SPDX_VER}',
+      f'{Tags.DATA_LICENSE}: {DATA_LIC}',
+      f'{Tags.SPDXID}: {sbom_doc.id}',
+      f'{Tags.DOCUMENT_NAME}: {sbom_doc.name}',
+      f'{Tags.DOCUMENT_NAMESPACE}: {sbom_doc.namespace}',
+    ]
+    for creator in sbom_doc.creators:
+      headers.append(f'{Tags.CREATOR}: {creator}')
+    headers.append(f'{Tags.CREATED}: {sbom_doc.created}')
+    for doc_ref in sbom_doc.external_refs:
+      headers.append(
+        f'{Tags.EXTERNAL_DOCUMENT_REF}: {doc_ref.id} {doc_ref.uri} {doc_ref.checksum}')
+    headers.append('')
+    return headers
+
+  @staticmethod
+  def marshal_package(package):
+    download_location = 'NONE'
+    if package.download_location:
+      download_location = package.download_location
+    tagvalues = [
+      f'{Tags.PACKAGE_NAME}: {package.name}',
+      f'{Tags.SPDXID}: {package.id}',
+      f'{Tags.PACKAGE_DOWNLOAD_LOCATION}: {download_location}',
+      f'{Tags.FILES_ANALYZED}: {str(package.files_analyzed).lower()}',
+    ]
+    if package.version:
+      tagvalues.append(f'{Tags.PACKAGE_VERSION}: {package.version}')
+    if package.supplier:
+      tagvalues.append(f'{Tags.PACKAGE_SUPPLIER}: {package.supplier}')
+    if package.verification_code:
+      tagvalues.append(f'{Tags.PACKAGE_VERIFICATION_CODE}: {package.verification_code}')
+    if package.external_refs:
+      for external_ref in package.external_refs:
+        tagvalues.append(
+          f'{Tags.PACKAGE_EXTERNAL_REF}: {external_ref.category} {external_ref.type} {external_ref.locator}')
+
+    tagvalues.append('')
+    return tagvalues
+
+  @staticmethod
+  def marshal_described_element(sbom_doc):
+    if not sbom_doc.describes:
+      return None
+
+    product_package = [p for p in sbom_doc.packages if p.id == sbom_doc.describes]
+    if product_package:
+      tagvalues = TagValueWriter.marshal_package(product_package[0])
+      tagvalues.append(
+        f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
+
+      tagvalues.append('')
+      return tagvalues
+
+    file = [f for f in sbom_doc.files if f.id == sbom_doc.describes]
+    if file:
+      tagvalues = [
+        f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}'
+      ]
+
+      return tagvalues
+
+    return None
+
+  @staticmethod
+  def marshal_packages(sbom_doc):
+    tagvalues = []
+    marshaled_relationships = []
+    i = 0
+    packages = sbom_doc.packages
+    while i < len(packages):
+      if packages[i].id == sbom_doc.describes:
+        i += 1
+        continue
+
+      if i + 1 < len(packages) \
+          and packages[i].id.startswith('SPDXRef-SOURCE-') \
+          and packages[i + 1].id.startswith('SPDXRef-UPSTREAM-'):
+        tagvalues += TagValueWriter.marshal_package(packages[i])
+        tagvalues += TagValueWriter.marshal_package(packages[i + 1])
+        rel = next((r for r in sbom_doc.relationships if
+                    r.id1 == packages[i].id and
+                    r.id2 == packages[i + 1].id and
+                    r.relationship == sbom_data.RelationshipType.VARIANT_OF), None)
+        if rel:
+          marshaled_relationships.append(rel)
+          tagvalues.append(TagValueWriter.marshal_relationship(rel))
+          tagvalues.append('')
+
+        i += 2
+      else:
+        tagvalues += TagValueWriter.marshal_package(packages[i])
+        i += 1
+
+    return tagvalues, marshaled_relationships
+
+  @staticmethod
+  def marshal_file(file):
+    tagvalues = [
+      f'{Tags.FILE_NAME}: {file.name}',
+      f'{Tags.SPDXID}: {file.id}',
+      f'{Tags.FILE_CHECKSUM}: {file.checksum}',
+      '',
+    ]
+
+    return tagvalues
+
+  @staticmethod
+  def marshal_files(sbom_doc):
+    tagvalues = []
+    for file in sbom_doc.files:
+      tagvalues += TagValueWriter.marshal_file(file)
+    return tagvalues
+
+  @staticmethod
+  def marshal_relationship(rel):
+    return f'{Tags.RELATIONSHIP}: {rel.id1} {rel.relationship} {rel.id2}'
+
+  @staticmethod
+  def marshal_relationships(sbom_doc, marshaled_rels):
+    tagvalues = []
+    sorted_rels = sorted(sbom_doc.relationships, key=lambda r: r.id2 + r.id1)
+    for rel in sorted_rels:
+      if any(r.id1 == rel.id1 and r.id2 == rel.id2 and r.relationship == rel.relationship
+             for r in marshaled_rels):
+        continue
+      tagvalues.append(TagValueWriter.marshal_relationship(rel))
+    tagvalues.append('')
+    return tagvalues
+
+  @staticmethod
+  def write(sbom_doc, file, fragment=False):
+    content = []
+    if not fragment:
+      content += TagValueWriter.marshal_doc_headers(sbom_doc)
+      described_element = TagValueWriter.marshal_described_element(sbom_doc)
+      if described_element:
+        content += described_element
+    content += TagValueWriter.marshal_files(sbom_doc)
+    tagvalues, marshaled_relationships = TagValueWriter.marshal_packages(sbom_doc)
+    content += tagvalues
+    content += TagValueWriter.marshal_relationships(sbom_doc, marshaled_relationships)
+    file.write('\n'.join(content))
+
+
+class PropNames:
+  # Common
+  SPDXID = 'SPDXID'
+  SPDX_VERSION = 'spdxVersion'
+  DATA_LICENSE = 'dataLicense'
+  NAME = 'name'
+  DOCUMENT_NAMESPACE = 'documentNamespace'
+  CREATION_INFO = 'creationInfo'
+  CREATORS = 'creators'
+  CREATED = 'created'
+  EXTERNAL_DOCUMENT_REF = 'externalDocumentRefs'
+  DOCUMENT_DESCRIBES = 'documentDescribes'
+  EXTERNAL_DOCUMENT_ID = 'externalDocumentId'
+  EXTERNAL_DOCUMENT_URI = 'spdxDocument'
+  EXTERNAL_DOCUMENT_CHECKSUM = 'checksum'
+  ALGORITHM = 'algorithm'
+  CHECKSUM_VALUE = 'checksumValue'
+
+  # Package
+  PACKAGES = 'packages'
+  PACKAGE_DOWNLOAD_LOCATION = 'downloadLocation'
+  PACKAGE_VERSION = 'versionInfo'
+  PACKAGE_SUPPLIER = 'supplier'
+  FILES_ANALYZED = 'filesAnalyzed'
+  PACKAGE_VERIFICATION_CODE = 'packageVerificationCode'
+  PACKAGE_VERIFICATION_CODE_VALUE = 'packageVerificationCodeValue'
+  PACKAGE_EXTERNAL_REFS = 'externalRefs'
+  PACKAGE_EXTERNAL_REF_CATEGORY = 'referenceCategory'
+  PACKAGE_EXTERNAL_REF_TYPE = 'referenceType'
+  PACKAGE_EXTERNAL_REF_LOCATOR = 'referenceLocator'
+  PACKAGE_HAS_FILES = 'hasFiles'
+
+  # File
+  FILES = 'files'
+  FILE_NAME = 'fileName'
+  FILE_CHECKSUMS = 'checksums'
+
+  # Relationship
+  RELATIONSHIPS = 'relationships'
+  REL_ELEMENT_ID = 'spdxElementId'
+  REL_RELATED_ELEMENT_ID = 'relatedSpdxElement'
+  REL_TYPE = 'relationshipType'
+
+
+class JSONWriter:
+  @staticmethod
+  def marshal_doc_headers(sbom_doc):
+    headers = {
+      PropNames.SPDX_VERSION: SPDX_VER,
+      PropNames.DATA_LICENSE: DATA_LIC,
+      PropNames.SPDXID: sbom_doc.id,
+      PropNames.NAME: sbom_doc.name,
+      PropNames.DOCUMENT_NAMESPACE: sbom_doc.namespace,
+      PropNames.CREATION_INFO: {}
+    }
+    creators = [creator for creator in sbom_doc.creators]
+    headers[PropNames.CREATION_INFO][PropNames.CREATORS] = creators
+    headers[PropNames.CREATION_INFO][PropNames.CREATED] = sbom_doc.created
+    external_refs = []
+    for doc_ref in sbom_doc.external_refs:
+      checksum = doc_ref.checksum.split(': ')
+      external_refs.append({
+        PropNames.EXTERNAL_DOCUMENT_ID: f'{doc_ref.id}',
+        PropNames.EXTERNAL_DOCUMENT_URI: doc_ref.uri,
+        PropNames.EXTERNAL_DOCUMENT_CHECKSUM: {
+          PropNames.ALGORITHM: checksum[0],
+          PropNames.CHECKSUM_VALUE: checksum[1]
+        }
+      })
+    if external_refs:
+      headers[PropNames.EXTERNAL_DOCUMENT_REF] = external_refs
+    headers[PropNames.DOCUMENT_DESCRIBES] = [sbom_doc.describes]
+
+    return headers
+
+  @staticmethod
+  def marshal_packages(sbom_doc):
+    packages = []
+    for p in sbom_doc.packages:
+      package = {
+        PropNames.NAME: p.name,
+        PropNames.SPDXID: p.id,
+        PropNames.PACKAGE_DOWNLOAD_LOCATION: p.download_location if p.download_location else 'NONE',
+        PropNames.FILES_ANALYZED: p.files_analyzed
+      }
+      if p.version:
+        package[PropNames.PACKAGE_VERSION] = p.version
+      if p.supplier:
+        package[PropNames.PACKAGE_SUPPLIER] = p.supplier
+      if p.verification_code:
+        package[PropNames.PACKAGE_VERIFICATION_CODE] = {
+          PropNames.PACKAGE_VERIFICATION_CODE_VALUE: p.verification_code
+        }
+      if p.external_refs:
+        package[PropNames.PACKAGE_EXTERNAL_REFS] = []
+        for ref in p.external_refs:
+          ext_ref = {
+            PropNames.PACKAGE_EXTERNAL_REF_CATEGORY: ref.category,
+            PropNames.PACKAGE_EXTERNAL_REF_TYPE: ref.type,
+            PropNames.PACKAGE_EXTERNAL_REF_LOCATOR: ref.locator,
+          }
+          package[PropNames.PACKAGE_EXTERNAL_REFS].append(ext_ref)
+      if p.file_ids:
+        package[PropNames.PACKAGE_HAS_FILES] = []
+        for file_id in p.file_ids:
+          package[PropNames.PACKAGE_HAS_FILES].append(file_id)
+
+      packages.append(package)
+
+    return {PropNames.PACKAGES: packages}
+
+  @staticmethod
+  def marshal_files(sbom_doc):
+    files = []
+    for f in sbom_doc.files:
+      file = {
+        PropNames.FILE_NAME: f.name,
+        PropNames.SPDXID: f.id
+      }
+      checksum = f.checksum.split(': ')
+      file[PropNames.FILE_CHECKSUMS] = [{
+        PropNames.ALGORITHM: checksum[0],
+        PropNames.CHECKSUM_VALUE: checksum[1],
+      }]
+      files.append(file)
+    return {PropNames.FILES: files}
+
+  @staticmethod
+  def marshal_relationships(sbom_doc):
+    relationships = []
+    sorted_rels = sorted(sbom_doc.relationships, key=lambda r: r.relationship + r.id2 + r.id1)
+    for r in sorted_rels:
+      rel = {
+        PropNames.REL_ELEMENT_ID: r.id1,
+        PropNames.REL_RELATED_ELEMENT_ID: r.id2,
+        PropNames.REL_TYPE: r.relationship,
+      }
+      relationships.append(rel)
+
+    return {PropNames.RELATIONSHIPS: relationships}
+
+  @staticmethod
+  def write(sbom_doc, file):
+    doc = {}
+    doc.update(JSONWriter.marshal_doc_headers(sbom_doc))
+    doc.update(JSONWriter.marshal_packages(sbom_doc))
+    doc.update(JSONWriter.marshal_files(sbom_doc))
+    doc.update(JSONWriter.marshal_relationships(sbom_doc))
+    file.write(json.dumps(doc, indent=4))
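(Sketch only, not part of the change: serializing a populated sbom_data.Document, e.g. the `doc` from the earlier sketch, with the two writers above. The output file names are placeholders.)

```
# Sketch: write one Document in both supported SPDX formats.
import sbom_writers

with open('sbom.spdx', 'w') as f:
    sbom_writers.TagValueWriter.write(doc, f)    # tag/value format
with open('sbom.spdx.json', 'w') as f:
    sbom_writers.JSONWriter.write(doc, f)        # JSON format
# TagValueWriter.write(doc, f, fragment=True) omits the document headers and the
# described element, as used for the unbundled APK case in the tests below.
```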
diff --git a/tools/sbom/sbom_writers_test.py b/tools/sbom/sbom_writers_test.py
new file mode 100644
index 0000000..4db2bb7
--- /dev/null
+++ b/tools/sbom/sbom_writers_test.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import io
+import pathlib
+import unittest
+import sbom_data
+import sbom_writers
+
+BUILD_FINGER_PRINT = 'build_finger_print'
+SUPPLIER_GOOGLE = 'Organization: Google'
+SUPPLIER_UPSTREAM = 'Organization: upstream'
+
+SPDXID_PREBUILT_PACKAGE1 = 'SPDXRef-PREBUILT-package1'
+SPDXID_SOURCE_PACKAGE1 = 'SPDXRef-SOURCE-package1'
+SPDXID_UPSTREAM_PACKAGE1 = 'SPDXRef-UPSTREAM-package1'
+
+SPDXID_FILE1 = 'SPDXRef-file1'
+SPDXID_FILE2 = 'SPDXRef-file2'
+SPDXID_FILE3 = 'SPDXRef-file3'
+
+
+class SBOMWritersTest(unittest.TestCase):
+
+  def setUp(self):
+    # SBOM of a product
+    self.sbom_doc = sbom_data.Document(name='test doc',
+                                       namespace='http://www.google.com/sbom/spdx/android',
+                                       creators=[SUPPLIER_GOOGLE],
+                                       created='2023-03-31T22:17:58Z',
+                                       describes=sbom_data.SPDXID_PRODUCT)
+    self.sbom_doc.add_external_ref(
+      sbom_data.DocumentExternalReference(id='DocumentRef-external_doc_ref',
+                                          uri='external_doc_uri',
+                                          checksum='SHA1: 1234567890'))
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
+                        name=sbom_data.PACKAGE_NAME_PRODUCT,
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        files_analyzed=True,
+                        verification_code='123456',
+                        file_ids=[SPDXID_FILE1, SPDXID_FILE2, SPDXID_FILE3]))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
+                        name=sbom_data.PACKAGE_NAME_PLATFORM,
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        ))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE1,
+                        name='Prebuilt package1',
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        ))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_SOURCE_PACKAGE1,
+                        name='Source package1',
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        external_refs=[sbom_data.PackageExternalRef(
+                          category=sbom_data.PackageExternalRefCategory.SECURITY,
+                          type=sbom_data.PackageExternalRefType.cpe22Type,
+                          locator='cpe:/a:jsoncpp_project:jsoncpp:1.9.4')]
+                        ))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_UPSTREAM_PACKAGE1,
+                        name='Upstream package1',
+                        supplier=SUPPLIER_UPSTREAM,
+                        version='1.1',
+                        ))
+
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_SOURCE_PACKAGE1,
+                                                          relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                          id2=SPDXID_UPSTREAM_PACKAGE1))
+
+    self.sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1', checksum='SHA1: 11111'))
+    self.sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE2, name='/bin/file2', checksum='SHA1: 22222'))
+    self.sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE3, name='/bin/file3', checksum='SHA1: 33333'))
+
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=sbom_data.SPDXID_PLATFORM))
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE2,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=SPDXID_PREBUILT_PACKAGE1))
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE3,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=SPDXID_SOURCE_PACKAGE1
+                                                          ))
+
+    # SBOM fragment of an APK
+    self.unbundled_sbom_doc = sbom_data.Document(name='test doc',
+                                                 namespace='http://www.google.com/sbom/spdx/android',
+                                                 creators=[SUPPLIER_GOOGLE],
+                                                 created='2023-03-31T22:17:58Z',
+                                                 describes=SPDXID_FILE1)
+
+    self.unbundled_sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1.apk', checksum='SHA1: 11111'))
+    self.unbundled_sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_SOURCE_PACKAGE1,
+                        name='Unbundled apk package',
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT))
+    self.unbundled_sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+                                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                                    id2=SPDXID_SOURCE_PACKAGE1))
+
+  def test_tagvalue_writer(self):
+    with io.StringIO() as output:
+      sbom_writers.TagValueWriter.write(self.sbom_doc, output)
+      expected_output = pathlib.Path('testdata/expected_tagvalue_sbom.spdx').read_text()
+      self.maxDiff = None
+      self.assertEqual(expected_output, output.getvalue())
+
+  def test_tagvalue_writer_unbundled(self):
+    with io.StringIO() as output:
+      sbom_writers.TagValueWriter.write(self.unbundled_sbom_doc, output, fragment=True)
+      expected_output = pathlib.Path('testdata/expected_tagvalue_sbom_unbundled.spdx').read_text()
+      self.maxDiff = None
+      self.assertEqual(expected_output, output.getvalue())
+
+  def test_json_writer(self):
+    with io.StringIO() as output:
+      sbom_writers.JSONWriter.write(self.sbom_doc, output)
+      expected_output = pathlib.Path('testdata/expected_json_sbom.spdx.json').read_text()
+      self.maxDiff = None
+      self.assertEqual(expected_output, output.getvalue())
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
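(Not part of the change: the tests above load their golden files via relative 'testdata/...' paths, so they are meant to run with tools/sbom as the working directory. A minimal sketch; the checkout path below is an assumption.)

```
# Sketch: run the SBOM writer tests from their own directory.
import os
import subprocess

os.chdir('build/make/tools/sbom')  # assumed checkout layout
subprocess.run(['python3', '-m', 'unittest', '-v', 'sbom_writers_test'], check=True)
```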
diff --git a/tools/sbom/testdata/expected_json_sbom.spdx.json b/tools/sbom/testdata/expected_json_sbom.spdx.json
new file mode 100644
index 0000000..628615f
--- /dev/null
+++ b/tools/sbom/testdata/expected_json_sbom.spdx.json
@@ -0,0 +1,137 @@
+{
+    "spdxVersion": "SPDX-2.3",
+    "dataLicense": "CC0-1.0",
+    "SPDXID": "SPDXRef-DOCUMENT",
+    "name": "test doc",
+    "documentNamespace": "http://www.google.com/sbom/spdx/android",
+    "creationInfo": {
+        "creators": [
+            "Organization: Google"
+        ],
+        "created": "2023-03-31T22:17:58Z"
+    },
+    "externalDocumentRefs": [
+        {
+            "externalDocumentId": "DocumentRef-external_doc_ref",
+            "spdxDocument": "external_doc_uri",
+            "checksum": {
+                "algorithm": "SHA1",
+                "checksumValue": "1234567890"
+            }
+        }
+    ],
+    "documentDescribes": [
+        "SPDXRef-PRODUCT"
+    ],
+    "packages": [
+        {
+            "name": "PRODUCT",
+            "SPDXID": "SPDXRef-PRODUCT",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": true,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google",
+            "packageVerificationCode": {
+                "packageVerificationCodeValue": "123456"
+            },
+            "hasFiles": [
+                "SPDXRef-file1",
+                "SPDXRef-file2",
+                "SPDXRef-file3"
+            ]
+        },
+        {
+            "name": "PLATFORM",
+            "SPDXID": "SPDXRef-PLATFORM",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": false,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google"
+        },
+        {
+            "name": "Prebuilt package1",
+            "SPDXID": "SPDXRef-PREBUILT-package1",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": false,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google"
+        },
+        {
+            "name": "Source package1",
+            "SPDXID": "SPDXRef-SOURCE-package1",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": false,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google",
+            "externalRefs": [
+                {
+                    "referenceCategory": "SECURITY",
+                    "referenceType": "cpe22Type",
+                    "referenceLocator": "cpe:/a:jsoncpp_project:jsoncpp:1.9.4"
+                }
+            ]
+        },
+        {
+            "name": "Upstream package1",
+            "SPDXID": "SPDXRef-UPSTREAM-package1",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": false,
+            "versionInfo": "1.1",
+            "supplier": "Organization: upstream"
+        }
+    ],
+    "files": [
+        {
+            "fileName": "/bin/file1",
+            "SPDXID": "SPDXRef-file1",
+            "checksums": [
+                {
+                    "algorithm": "SHA1",
+                    "checksumValue": "11111"
+                }
+            ]
+        },
+        {
+            "fileName": "/bin/file2",
+            "SPDXID": "SPDXRef-file2",
+            "checksums": [
+                {
+                    "algorithm": "SHA1",
+                    "checksumValue": "22222"
+                }
+            ]
+        },
+        {
+            "fileName": "/bin/file3",
+            "SPDXID": "SPDXRef-file3",
+            "checksums": [
+                {
+                    "algorithm": "SHA1",
+                    "checksumValue": "33333"
+                }
+            ]
+        }
+    ],
+    "relationships": [
+        {
+            "spdxElementId": "SPDXRef-file1",
+            "relatedSpdxElement": "SPDXRef-PLATFORM",
+            "relationshipType": "GENERATED_FROM"
+        },
+        {
+            "spdxElementId": "SPDXRef-file2",
+            "relatedSpdxElement": "SPDXRef-PREBUILT-package1",
+            "relationshipType": "GENERATED_FROM"
+        },
+        {
+            "spdxElementId": "SPDXRef-file3",
+            "relatedSpdxElement": "SPDXRef-SOURCE-package1",
+            "relationshipType": "GENERATED_FROM"
+        },
+        {
+            "spdxElementId": "SPDXRef-SOURCE-package1",
+            "relatedSpdxElement": "SPDXRef-UPSTREAM-package1",
+            "relationshipType": "VARIANT_OF"
+        }
+    ]
+}
\ No newline at end of file
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom.spdx b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
new file mode 100644
index 0000000..0f1c6f8
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
@@ -0,0 +1,65 @@
+SPDXVersion: SPDX-2.3
+DataLicense: CC0-1.0
+SPDXID: SPDXRef-DOCUMENT
+DocumentName: test doc
+DocumentNamespace: http://www.google.com/sbom/spdx/android
+Creator: Organization: Google
+Created: 2023-03-31T22:17:58Z
+ExternalDocumentRef: DocumentRef-external_doc_ref external_doc_uri SHA1: 1234567890
+
+PackageName: PRODUCT
+SPDXID: SPDXRef-PRODUCT
+PackageDownloadLocation: NONE
+FilesAnalyzed: true
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+PackageVerificationCode: 123456
+
+Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-PRODUCT
+
+FileName: /bin/file1
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+FileName: /bin/file2
+SPDXID: SPDXRef-file2
+FileChecksum: SHA1: 22222
+
+FileName: /bin/file3
+SPDXID: SPDXRef-file3
+FileChecksum: SHA1: 33333
+
+PackageName: PLATFORM
+SPDXID: SPDXRef-PLATFORM
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Prebuilt package1
+SPDXID: SPDXRef-PREBUILT-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Source package1
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4
+
+PackageName: Upstream package1
+SPDXID: SPDXRef-UPSTREAM-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: 1.1
+PackageSupplier: Organization: upstream
+
+Relationship: SPDXRef-SOURCE-package1 VARIANT_OF SPDXRef-UPSTREAM-package1
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-PLATFORM
+Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
+Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
new file mode 100644
index 0000000..a00c291
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
@@ -0,0 +1,12 @@
+FileName: /bin/file1.apk
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+PackageName: Unbundled apk package
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-SOURCE-package1
diff --git a/tools/signapk/Android.bp b/tools/signapk/Android.bp
index bee6a6f..c4f25f8 100644
--- a/tools/signapk/Android.bp
+++ b/tools/signapk/Android.bp
@@ -31,6 +31,10 @@
         "conscrypt-unbundled",
     ],
 
+    // b/267608166: Don't target Java 17 so the host-side tool can still run in
+    // environments where only JDK 11 is available.
+    java_version: "11",
+
     jni_libs: ["libconscrypt_openjdk_jni"],
 
     // The post-build signing tools need signapk.jar (and its shared libraries,
diff --git a/tools/soong_to_convert.py b/tools/soong_to_convert.py
index 949131b..649829f 100755
--- a/tools/soong_to_convert.py
+++ b/tools/soong_to_convert.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2016 The Android Open Source Project
 #
@@ -50,9 +50,6 @@
 Not all problems can be discovered, but this is a starting point.
 
 """
-
-from __future__ import print_function
-
 import csv
 import sys
 
@@ -113,7 +110,7 @@
 
 def main(filename):
     """Read the CSV file, print the results"""
-    with open(filename, 'rb') as csvfile:
+    with open(filename, 'r') as csvfile:
         results = process(csv.reader(csvfile))
 
     native_results = filter(results, "native")
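(Side note on the csv change above, not part of the diff: in Python 3, csv.reader expects an iterable of text lines, so the file must be opened in text mode rather than 'rb'; the csv documentation additionally recommends newline=''. Minimal illustration with a placeholder file name.)

```
# Python 3 csv idiom the change above relies on: open in text mode, not 'rb'.
import csv

with open('soong_to_convert.csv', 'r', newline='') as csvfile:  # placeholder name
    for row in csv.reader(csvfile):
        print(row)
```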
diff --git a/tools/stub_diff_analyzer.py b/tools/stub_diff_analyzer.py
new file mode 100644
index 0000000..e49d092
--- /dev/null
+++ b/tools/stub_diff_analyzer.py
@@ -0,0 +1,328 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from sys import exit
+from typing import List
+from glob import glob
+from pathlib import Path
+from collections import defaultdict
+from difflib import Differ
+from re import split
+from tqdm import tqdm
+import argparse
+
+
+DIFFER_CODE_LEN = 2
+
+class DifferCodes:
+    COMMON = '  '
+    UNIQUE_FIRST = '- '
+    UNIQUE_SECOND = '+ '
+    DIFF_IDENT = '? '
+
+class FilesDiffAnalyzer:
+    def __init__(self, args) -> None:
+        self.out_dir = args.out_dir
+        self.show_diff = args.show_diff
+        self.skip_words = args.skip_words
+        self.first_dir = args.first_dir
+        self.second_dir = args.second_dir
+        self.include_common = args.include_common
+
+        self.first_dir_files = self.get_files(self.first_dir)
+        self.second_dir_files = self.get_files(self.second_dir)
+        self.common_file_map = defaultdict(set)
+
+        self.map_common_files(self.first_dir_files, self.first_dir)
+        self.map_common_files(self.second_dir_files, self.second_dir)
+
+    def get_files(self, dir: str) -> List[str]:
+        """Get all files directory in the input directory including the files in the subdirectories
+
+        Recursively finds all files in the input directory.
+        Returns a list of file directory strings, which do not include directories but only files.
+        List is sorted in alphabetical order of the file directories.
+
+        Args:
+            dir: Directory to get the files. String.
+
+        Returns:
+            A list of file directory strings within the input directory.
+            Sorted in Alphabetical order.
+
+        Raises:
+            FileNotFoundError: An error occurred accessing the non-existing directory
+        """
+
+        if not dir_exists(dir):
+            raise FileNotFoundError("Directory does not exist")
+
+        if dir[:-2] != "**":
+            if dir[:-1] != "/":
+                dir += "/"
+            dir += "**"
+
+        return [file for file in sorted(glob(dir, recursive=True)) if Path(file).is_file()]
+
+    def map_common_files(self, files: List[str], dir: str) -> None:
+        for file in files:
+            file_name = file.split(dir, 1)[-1]
+            self.common_file_map[file_name].add(dir)
+        return
+
+    def compare_file_contents(self, first_file: str, second_file: str) -> List[str]:
+        """Compare the contents of the files and return different lines
+
+        Given two file directory strings, compare the contents of the two files
+        and return the list of file contents string prepended with unique identifier codes.
+        The identifier codes include:
+        - '  '(two empty space characters): Line common to two files
+        - '- '(minus followed by a space) : Line unique to first file
+        - '+ '(plus followed by a space)  : Line unique to second file
+
+        Args:
+            first_file: First file directory string to compare the content
+            second_file: Second file directory string to compare the content
+
+        Returns:
+            A list of the file content strings. For example:
+
+            [
+                "  Foo",
+                "- Bar",
+                "+ Baz"
+            ]
+        """
+
+        d = Differ()
+        first_file_contents = sort_methods(get_file_contents(first_file))
+        second_file_contents = sort_methods(get_file_contents(second_file))
+        diff = list(d.compare(first_file_contents, second_file_contents))
+        ret = [f"diff {first_file} {second_file}"]
+
+        idx = 0
+        while idx < len(diff):
+            line = diff[idx]
+            line_code = line[:DIFFER_CODE_LEN]
+
+            match line_code:
+                case DifferCodes.COMMON:
+                    if self.include_common:
+                        ret.append(line)
+
+                case DifferCodes.UNIQUE_FIRST:
+                    # A '- ' line may pair with a following '+ ' line; compare them word by word.
+                    if (idx < len(diff) - 1 and
+                        (next_line_code := diff[idx + 1][:DIFFER_CODE_LEN])
+                        not in (DifferCodes.UNIQUE_FIRST, DifferCodes.COMMON)):
+                        delta = 1 if next_line_code == DifferCodes.UNIQUE_SECOND else 2
+                        line_to_compare = diff[idx + delta]
+                        if self.lines_differ(line, line_to_compare):
+                            ret.extend([line, line_to_compare])
+                        else:
+                            if self.include_common:
+                                ret.append(DifferCodes.COMMON +
+                                           line[DIFFER_CODE_LEN:])
+                        idx += delta
+                    else:
+                        ret.append(line)
+
+                case DifferCodes.UNIQUE_SECOND:
+                    ret.append(line)
+
+                case DifferCodes.DIFF_IDENT:
+                    pass
+            idx += 1
+        return ret
+
+    def lines_differ(self, line1: str, line2: str) -> bool:
+        """Check if the input lines are different or not
+
+        Compare the two lines word by word and check if the two lines are different or not.
+        If the different words in the comparing lines are included in skip_words,
+        the lines are not considered different.
+
+        Args:
+            line1:      first line to compare
+            line2:      second line to compare
+
+        Returns:
+            Boolean value indicating if the two lines are different or not
+
+        """
+        # Split by '.' or ' '(whitespace)
+        def split_words(line: str) -> List[str]:
+            return split('\\s|\\.', line[DIFFER_CODE_LEN:])
+
+        line1_words, line2_words = split_words(line1), split_words(line2)
+        if len(line1_words) != len(line2_words):
+            return True
+
+        for word1, word2 in zip(line1_words, line2_words):
+            if word1 != word2:
+                # Don't check whether a word equals a skip word; check whether
+                # it contains a skip word as a substring.
+                if all(sw not in word1 and sw not in word2 for sw in self.skip_words):
+                    return True
+
+        return False
+
+    def analyze(self) -> None:
+        """Analyze file contents in both directories and write to output or console.
+        """
+        for file in tqdm(sorted(self.common_file_map.keys())):
+            val = self.common_file_map[file]
+
+            # When file exists in both directories
+            lines = list()
+            if val == set([self.first_dir, self.second_dir]):
+                lines = self.compare_file_contents(
+                    self.first_dir + file, self.second_dir + file)
+            else:
+                existing_dir, not_existing_dir = (
+                    (self.first_dir, self.second_dir) if self.first_dir in val
+                    else (self.second_dir, self.first_dir))
+
+                lines = [f"{not_existing_dir}{file} does not exist."]
+
+                if self.show_diff:
+                    lines.append(f"Content of {existing_dir}{file}: \n")
+                    lines.extend(get_file_contents(existing_dir + file))
+
+            self.write(lines)
+
+    def write(self, lines: List[str]) -> None:
+        if self.out_dir == "":
+            pprint(lines)
+        else:
+            write_lines(self.out_dir, lines)
+
+###
+# Helper functions
+###
+
+def sort_methods(lines: List[str]) -> List[str]:
+    """Sort class methods in the file contents by alphabetical order
+
+    Given lines of Java file contents, return lines with class methods sorted in alphabetical order.
+    Also omit empty lines or lines with spaces.
+    For example:
+        l = [
+            "package android.test;",
+            "",
+            "public static final int ORANGE = 1;",
+            "",
+            "public class TestClass {",
+            "public TestClass() { throw new RuntimeException("Stub!"); }",
+            "public void foo() { throw new RuntimeException("Stub!"); }",
+            "public void bar() { throw new RuntimeException("Stub!"); }",
+            "}"
+        ]
+        sort_methods(l) returns
+        [
+            "package android.test;",
+            "public static final int ORANGE = 1;",
+            "public class TestClass {",
+            "public TestClass() { throw new RuntimeException("Stub!"); }",
+            "public void bar() { throw new RuntimeException("Stub!"); }",
+            "public void foo() { throw new RuntimeException("Stub!"); }",
+            "}"
+        ]
+
+    Args:
+        lines: List of strings consisted of Java file contents.
+
+    Returns:
+        A list of string with sorted class methods.
+
+    """
+    def is_not_blank(l: str) -> bool:
+        return bool(l) and not l.isspace()
+
+    ret = list()
+
+    in_class = False
+    buffer = list()
+    for line in lines:
+        if not in_class:
+            if "class" in line:
+                in_class = True
+                ret.append(line)
+            else:
+                # Adding static variables, package info, etc.
+                # Skipping empty or space lines.
+                if is_not_blank(line):
+                    ret.append(line)
+        else:
+            # End of class
+            if line and line[0] == "}":
+                in_class = False
+                ret.extend(sorted(buffer))
+                buffer = list()
+                ret.append(line)
+            else:
+                if is_not_blank(line):
+                    buffer.append(line)
+
+    return ret
+
+def get_file_contents(file_path: str) -> List[str]:
+    with open(file_path) as f:
+        return [line.rstrip('\n') for line in f]
+
+def pprint(l: List[str]) -> None:
+    for line in l:
+        print(line)
+
+def write_lines(out_dir: str, lines: List[str]) -> None:
+    with open(out_dir, "a") as f:
+        f.writelines(line + '\n' for line in lines)
+        f.write("\n")
+
+def dir_exists(dir: str) -> bool:
+    return Path(dir).exists()
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('first_dir', action='store', type=str,
+                        help="first path to compare file directory and contents")
+    parser.add_argument('second_dir', action='store', type=str,
+                        help="second path to compare file directory and contents")
+    parser.add_argument('--out', dest='out_dir',
+                        action='store', default="", type=str,
+                        help="optional directory to write log. If not set, will print to console")
+    parser.add_argument('--show-diff-file', dest='show_diff',
+                        action=argparse.BooleanOptionalAction,
+                        help="optional flag. If passed, will print out the content of the file unique to each directories")
+    parser.add_argument('--include-common', dest='include_common',
+                        action=argparse.BooleanOptionalAction,
+                        help="optional flag. If passed, will print out the contents common to both files as well,\
+                            instead of printing only diff lines.")
+    parser.add_argument('--skip-words', nargs='+',
+                        dest='skip_words', default=[], help="optional words to skip in comparison")
+
+    args = parser.parse_args()
+
+    if not args.first_dir or not args.second_dir:
+        parser.print_usage()
+        exit(0)
+
+    analyzer = FilesDiffAnalyzer(args)
+    analyzer.analyze()
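(Illustration only, not part of the change: the difflib.Differ line codes that compare_file_contents above interprets, shown on two made-up input lists.)

```
# Differ prefixes each output line with a two-character code:
#   '  ' common, '- ' only in the first input, '+ ' only in the second,
#   '? ' hint lines (the DIFF_IDENT case, which the analyzer above skips).
from difflib import Differ

first = ['public void bar() {}', 'public void foo() {}']
second = ['public void baz() {}', 'public void foo() {}']
for line in Differ().compare(first, second):
    print(line)
```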
diff --git a/tools/test_post_process_props.py b/tools/test_post_process_props.py
index 236f9ed..439fc9f 100644
--- a/tools/test_post_process_props.py
+++ b/tools/test_post_process_props.py
@@ -256,6 +256,7 @@
     with contextlib.redirect_stderr(stderr_redirect):
       props = PropList("hello")
       props.put("ro.board.first_api_level","25")
+      props.put("ro.build.version.codename", "REL")
 
       # ro.board.first_api_level must be less than or equal to the sdk version
       self.assertFalse(validate_grf_props(props, 20))
@@ -273,5 +274,10 @@
       # ro.board.api_level must be less than or equal to the sdk version
       self.assertFalse(validate_grf_props(props, 25))
 
+      # allow setting a future api_level before release
+      props.get_all_props()[-2].make_as_comment()
+      props.put("ro.build.version.codename", "NonRel")
+      self.assertTrue(validate_grf_props(props, 24))
+
 if __name__ == '__main__':
     unittest.main(verbosity=2)
diff --git a/tools/warn/tidy_warn_patterns.py b/tools/warn/tidy_warn_patterns.py
index c138f1c..5ee66c0 100644
--- a/tools/warn/tidy_warn_patterns.py
+++ b/tools/warn/tidy_warn_patterns.py
@@ -224,6 +224,7 @@
     analyzer_warn_check('clang-analyzer-valist.Unterminated'),
     analyzer_group_check('clang-analyzer-core.uninitialized'),
     analyzer_group_check('clang-analyzer-deadcode'),
+    analyzer_warn_check('clang-analyzer-security.insecureAPI.DeprecatedOrUnsafeBufferHandling'),
     analyzer_warn_check('clang-analyzer-security.insecureAPI.bcmp'),
     analyzer_warn_check('clang-analyzer-security.insecureAPI.bcopy'),
     analyzer_warn_check('clang-analyzer-security.insecureAPI.bzero'),
diff --git a/tools/zipalign/ZipEntry.cpp b/tools/zipalign/ZipEntry.cpp
index fcad96c..689999e 100644
--- a/tools/zipalign/ZipEntry.cpp
+++ b/tools/zipalign/ZipEntry.cpp
@@ -40,14 +40,10 @@
  */
 status_t ZipEntry::initFromCDE(FILE* fp)
 {
-    status_t result;
-    long posn; // NOLINT(google-runtime-int), for ftell/fseek
-    bool hasDD;
-
     //ALOGV("initFromCDE ---\n");
 
     /* read the CDE */
-    result = mCDE.read(fp);
+    status_t result = mCDE.read(fp);
     if (result != OK) {
         ALOGD("mCDE.read failed\n");
         return result;
@@ -56,8 +52,8 @@
     //mCDE.dump();
 
     /* using the info in the CDE, go load up the LFH */
-    posn = ftell(fp);
-    if (fseek(fp, mCDE.mLocalHeaderRelOffset, SEEK_SET) != 0) {
+    off_t posn = ftello(fp);
+    if (fseeko(fp, mCDE.mLocalHeaderRelOffset, SEEK_SET) != 0) {
         ALOGD("local header seek failed (%" PRIu32 ")\n",
             mCDE.mLocalHeaderRelOffset);
         return UNKNOWN_ERROR;
@@ -69,7 +65,7 @@
         return result;
     }
 
-    if (fseek(fp, posn, SEEK_SET) != 0)
+    if (fseeko(fp, posn, SEEK_SET) != 0)
         return UNKNOWN_ERROR;
 
     //mLFH.dump();
@@ -80,7 +76,7 @@
      * compressed size, and uncompressed size will be zero.  In practice
      * these seem to be rare.
      */
-    hasDD = (mLFH.mGPBitFlag & kUsesDataDescr) != 0;
+    bool hasDD = (mLFH.mGPBitFlag & kUsesDataDescr) != 0;
     if (hasDD) {
         // do something clever
         //ALOGD("+++ has data descriptor\n");
diff --git a/tools/zipalign/ZipFile.cpp b/tools/zipalign/ZipFile.cpp
index f2f65a6..42cc349 100644
--- a/tools/zipalign/ZipFile.cpp
+++ b/tools/zipalign/ZipFile.cpp
@@ -35,6 +35,8 @@
 #include <assert.h>
 #include <inttypes.h>
 
+_Static_assert(sizeof(off_t) == 8, "off_t too small");
+
 namespace android {
 
 /*
@@ -205,56 +207,43 @@
  */
 status_t ZipFile::readCentralDir(void)
 {
-    status_t result = OK;
-    uint8_t* buf = NULL;
-    off_t fileLength, seekStart;
-    long readAmount;
-    int i;
-
-    fseek(mZipFp, 0, SEEK_END);
-    fileLength = ftell(mZipFp);
+    fseeko(mZipFp, 0, SEEK_END);
+    off_t fileLength = ftello(mZipFp);
     rewind(mZipFp);
 
     /* too small to be a ZIP archive? */
     if (fileLength < EndOfCentralDir::kEOCDLen) {
-        ALOGD("Length is %ld -- too small\n", (long)fileLength);
-        result = INVALID_OPERATION;
-        goto bail;
+        ALOGD("Length is %lld -- too small\n", (long long) fileLength);
+        return INVALID_OPERATION;
     }
 
-    buf = new uint8_t[EndOfCentralDir::kMaxEOCDSearch];
-    if (buf == NULL) {
-        ALOGD("Failure allocating %d bytes for EOCD search",
-             EndOfCentralDir::kMaxEOCDSearch);
-        result = NO_MEMORY;
-        goto bail;
-    }
-
+    off_t seekStart;
+    size_t readAmount;
     if (fileLength > EndOfCentralDir::kMaxEOCDSearch) {
         seekStart = fileLength - EndOfCentralDir::kMaxEOCDSearch;
         readAmount = EndOfCentralDir::kMaxEOCDSearch;
     } else {
         seekStart = 0;
-        readAmount = (long) fileLength;
+        readAmount = fileLength;
     }
-    if (fseek(mZipFp, seekStart, SEEK_SET) != 0) {
-        ALOGD("Failure seeking to end of zip at %ld", (long) seekStart);
-        result = UNKNOWN_ERROR;
-        goto bail;
+    if (fseeko(mZipFp, seekStart, SEEK_SET) != 0) {
+        ALOGD("Failure seeking to end of zip at %lld", (long long) seekStart);
+        return UNKNOWN_ERROR;
     }
 
     /* read the last part of the file into the buffer */
-    if (fread(buf, 1, readAmount, mZipFp) != (size_t) readAmount) {
+    uint8_t buf[EndOfCentralDir::kMaxEOCDSearch];
+    if (fread(buf, 1, readAmount, mZipFp) != readAmount) {
         if (feof(mZipFp)) {
-            ALOGW("fread %ld bytes failed, unexpected EOF", readAmount);
+            ALOGW("fread %zu bytes failed, unexpected EOF", readAmount);
         } else {
-            ALOGW("fread %ld bytes failed, %s", readAmount, strerror(errno));
+            ALOGW("fread %zu bytes failed, %s", readAmount, strerror(errno));
         }
-        result = UNKNOWN_ERROR;
-        goto bail;
+        return UNKNOWN_ERROR;
     }
 
     /* find the end-of-central-dir magic */
+    int i;
     for (i = readAmount - 4; i >= 0; i--) {
         if (buf[i] == 0x50 &&
             ZipEntry::getLongLE(&buf[i]) == EndOfCentralDir::kSignature)
@@ -265,15 +254,14 @@
     }
     if (i < 0) {
         ALOGD("EOCD not found, not Zip\n");
-        result = INVALID_OPERATION;
-        goto bail;
+        return INVALID_OPERATION;
     }
 
     /* extract eocd values */
-    result = mEOCD.readBuf(buf + i, readAmount - i);
+    status_t result = mEOCD.readBuf(buf + i, readAmount - i);
     if (result != OK) {
-        ALOGD("Failure reading %ld bytes of EOCD values", readAmount - i);
-        goto bail;
+        ALOGD("Failure reading %zu bytes of EOCD values", readAmount - i);
+        return result;
     }
     //mEOCD.dump();
 
@@ -281,8 +269,7 @@
         mEOCD.mNumEntries != mEOCD.mTotalNumEntries)
     {
         ALOGD("Archive spanning not supported\n");
-        result = INVALID_OPERATION;
-        goto bail;
+        return INVALID_OPERATION;
     }
 
     /*
@@ -299,11 +286,10 @@
      * The only thing we really need right now is the file comment, which
      * we're hoping to preserve.
      */
-    if (fseek(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
+    if (fseeko(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
         ALOGD("Failure seeking to central dir offset %" PRIu32 "\n",
              mEOCD.mCentralDirOffset);
-        result = UNKNOWN_ERROR;
-        goto bail;
+        return UNKNOWN_ERROR;
     }
 
     /*
@@ -318,7 +304,7 @@
         if (result != OK) {
             ALOGD("initFromCDE failed\n");
             delete pEntry;
-            goto bail;
+            return result;
         }
 
         mEntries.add(pEntry);
@@ -336,20 +322,16 @@
             } else {
                 ALOGW("fread EOCD failed, %s", strerror(errno));
             }
-            result = INVALID_OPERATION;
-            goto bail;
+            return INVALID_OPERATION;
         }
         if (ZipEntry::getLongLE(checkBuf) != EndOfCentralDir::kSignature) {
             ALOGD("EOCD read check failed\n");
-            result = UNKNOWN_ERROR;
-            goto bail;
+            return UNKNOWN_ERROR;
         }
         ALOGV("+++ EOCD read check passed\n");
     }
 
-bail:
-    delete[] buf;
-    return result;
+    return OK;
 }
 
 
@@ -370,9 +352,8 @@
 {
     ZipEntry* pEntry = NULL;
     status_t result = OK;
-    long lfhPosn, startPosn, endPosn, uncompressedLen;
-    FILE* inputFp = NULL;
-    uint32_t crc;
+    off_t lfhPosn, startPosn, endPosn, uncompressedLen;
+    uint32_t crc = 0;
     time_t modWhen;
 
     if (mReadOnly)
@@ -389,13 +370,14 @@
     if (getEntryByName(storageName) != NULL)
         return ALREADY_EXISTS;
 
+    FILE* inputFp = NULL;
     if (!data) {
         inputFp = fopen(fileName, FILE_OPEN_RO);
         if (inputFp == NULL)
             return errnoToStatus(errno);
     }
 
-    if (fseek(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
+    if (fseeko(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
         result = UNKNOWN_ERROR;
         goto bail;
     }
@@ -413,9 +395,9 @@
      * as a place-holder.  In theory the LFH isn't necessary, but in
      * practice some utilities demand it.
      */
-    lfhPosn = ftell(mZipFp);
+    lfhPosn = ftello(mZipFp);
     pEntry->mLFH.write(mZipFp);
-    startPosn = ftell(mZipFp);
+    startPosn = ftello(mZipFp);
 
     /*
      * Copy the data in, possibly compressing it as we go.
@@ -432,11 +414,11 @@
              * to be set through an API call, but I don't expect our
              * criteria to change over time.
              */
-            long src = inputFp ? ftell(inputFp) : size;
-            long dst = ftell(mZipFp) - startPosn;
+            off_t src = inputFp ? ftello(inputFp) : size;
+            off_t dst = ftello(mZipFp) - startPosn;
             if (dst + (dst / 10) > src) {
-                ALOGD("insufficient compression (src=%ld dst=%ld), storing\n",
-                    src, dst);
+                ALOGD("insufficient compression (src=%lld dst=%lld), storing\n",
+                    (long long) src, (long long) dst);
                 failed = true;
             }
         }
@@ -444,7 +426,7 @@
         if (failed) {
             compressionMethod = ZipEntry::kCompressStored;
             if (inputFp) rewind(inputFp);
-            fseek(mZipFp, startPosn, SEEK_SET);
+            fseeko(mZipFp, startPosn, SEEK_SET);
             /* fall through to kCompressStored case */
         }
     }
@@ -463,7 +445,7 @@
     }
 
     // currently seeked to end of file
-    uncompressedLen = inputFp ? ftell(inputFp) : size;
+    uncompressedLen = inputFp ? ftello(inputFp) : size;
 
     /*
      * We could write the "Data Descriptor", but there doesn't seem to
@@ -471,7 +453,7 @@
      *
      * Update file offsets.
      */
-    endPosn = ftell(mZipFp);            // seeked to end of compressed data
+    endPosn = ftello(mZipFp);            // seeked to end of compressed data
 
     /*
      * Success!  Fill out new values.
@@ -489,7 +471,7 @@
     /*
      * Go back and write the LFH.
      */
-    if (fseek(mZipFp, lfhPosn, SEEK_SET) != 0) {
+    if (fseeko(mZipFp, lfhPosn, SEEK_SET) != 0) {
         result = UNKNOWN_ERROR;
         goto bail;
     }
@@ -522,7 +504,7 @@
 
     // Calculate where the entry payload offset will end up if we were to write
     // it as-is.
-    uint64_t expectedPayloadOffset = ftell(mZipFp) +
+    uint64_t expectedPayloadOffset = ftello(mZipFp) +
         android::ZipEntry::LocalFileHeader::kLFHLen +
         pEntry->mLFH.mFileNameLength +
         pEntry->mLFH.mExtraFieldLength;
@@ -548,7 +530,7 @@
 {
     ZipEntry* pEntry = NULL;
     status_t result;
-    long lfhPosn, endPosn;
+    off_t lfhPosn, endPosn;
 
     if (mReadOnly)
         return INVALID_OPERATION;
@@ -557,7 +539,7 @@
     assert(mZipFp != NULL);
     assert(mEntries.size() == mEOCD.mTotalNumEntries);
 
-    if (fseek(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
+    if (fseeko(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
         result = UNKNOWN_ERROR;
         goto bail;
     }
@@ -585,7 +567,7 @@
      * Write the LFH.  Since we're not recompressing the data, we already
      * have all of the fields filled out.
      */
-    lfhPosn = ftell(mZipFp);
+    lfhPosn = ftello(mZipFp);
     pEntry->mLFH.write(mZipFp);
 
     /*
@@ -595,8 +577,7 @@
      * fields as well.  This is a fixed-size area immediately following
      * the data.
      */
-    if (fseek(pSourceZip->mZipFp, pSourceEntry->getFileOffset(), SEEK_SET) != 0)
-    {
+    if (fseeko(pSourceZip->mZipFp, pSourceEntry->getFileOffset(), SEEK_SET) != 0) {
         result = UNKNOWN_ERROR;
         goto bail;
     }
@@ -617,7 +598,7 @@
     /*
      * Update file offsets.
      */
-    endPosn = ftell(mZipFp);
+    endPosn = ftello(mZipFp);
 
     /*
      * Success!  Fill out new values.
@@ -654,7 +635,7 @@
 {
     ZipEntry* pEntry = NULL;
     status_t result;
-    long lfhPosn, uncompressedLen;
+    off_t lfhPosn, uncompressedLen;
 
     if (mReadOnly)
         return INVALID_OPERATION;
@@ -663,7 +644,7 @@
     assert(mZipFp != NULL);
     assert(mEntries.size() == mEOCD.mTotalNumEntries);
 
-    if (fseek(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
+    if (fseeko(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
         result = UNKNOWN_ERROR;
         goto bail;
     }
@@ -688,7 +669,7 @@
      * as a place-holder.  In theory the LFH isn't necessary, but in
      * practice some utilities demand it.
      */
-    lfhPosn = ftell(mZipFp);
+    lfhPosn = ftello(mZipFp);
     pEntry->mLFH.write(mZipFp);
 
     /*
@@ -698,8 +679,7 @@
      * fields as well.  This is a fixed-size area immediately following
      * the data.
      */
-    if (fseek(pSourceZip->mZipFp, pSourceEntry->getFileOffset(), SEEK_SET) != 0)
-    {
+    if (fseeko(pSourceZip->mZipFp, pSourceEntry->getFileOffset(), SEEK_SET) != 0) {
         result = UNKNOWN_ERROR;
         goto bail;
     }
@@ -712,7 +692,7 @@
             result = NO_MEMORY;
             goto bail;
         }
-        long startPosn = ftell(mZipFp);
+        off_t startPosn = ftello(mZipFp);
         uint32_t crc;
         if (compressFpToFp(mZipFp, NULL, buf, uncompressedLen, &crc) != OK) {
             ALOGW("recompress of '%s' failed\n", pEntry->mCDE.mFileName);
@@ -720,13 +700,12 @@
             free(buf);
             goto bail;
         }
-        long endPosn = ftell(mZipFp);
+        off_t endPosn = ftello(mZipFp);
         pEntry->setDataInfo(uncompressedLen, endPosn - startPosn,
             pSourceEntry->getCRC32(), ZipEntry::kCompressDeflated);
         free(buf);
     } else {
-        off_t copyLen;
-        copyLen = pSourceEntry->getCompressedLen();
+        off_t copyLen = pSourceEntry->getCompressedLen();
         if ((pSourceEntry->mLFH.mGPBitFlag & ZipEntry::kUsesDataDescr) != 0)
             copyLen += ZipEntry::kDataDescriptorLen;
 
@@ -746,12 +725,12 @@
     mEOCD.mNumEntries++;
     mEOCD.mTotalNumEntries++;
     mEOCD.mCentralDirSize = 0;      // mark invalid; set by flush()
-    mEOCD.mCentralDirOffset = ftell(mZipFp);
+    mEOCD.mCentralDirOffset = ftello(mZipFp);
 
     /*
      * Go back and write the LFH.
      */
-    if (fseek(mZipFp, lfhPosn, SEEK_SET) != 0) {
+    if (fseeko(mZipFp, lfhPosn, SEEK_SET) != 0) {
         result = UNKNOWN_ERROR;
         goto bail;
     }
@@ -978,7 +957,7 @@
 status_t ZipFile::flush(void)
 {
     status_t result = OK;
-    long eocdPosn;
+    off_t eocdPosn;
     int i, count;
 
     if (mReadOnly)
@@ -992,8 +971,7 @@
     if (result != OK)
         return result;
 
-    if (fseek(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0)
-        return UNKNOWN_ERROR;
+    if (fseeko(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) return UNKNOWN_ERROR;
 
     count = mEntries.size();
     for (i = 0; i < count; i++) {
@@ -1001,7 +979,7 @@
         pEntry->mCDE.write(mZipFp);
     }
 
-    eocdPosn = ftell(mZipFp);
+    eocdPosn = ftello(mZipFp);
     mEOCD.mCentralDirSize = eocdPosn - mEOCD.mCentralDirOffset;
 
     mEOCD.write(mZipFp);
@@ -1011,8 +989,8 @@
      * with plain files, or if we deleted some entries, there's a lot
      * of wasted space at the end of the file.  Remove it now.
      */
-    if (ftruncate(fileno(mZipFp), ftell(mZipFp)) != 0) {
-        ALOGW("ftruncate failed %ld: %s\n", ftell(mZipFp), strerror(errno));
+    if (ftruncate(fileno(mZipFp), ftello(mZipFp)) != 0) {
+        ALOGW("ftruncate failed %lld: %s\n", (long long) ftello(mZipFp), strerror(errno));
         // not fatal
     }
 
@@ -1141,32 +1119,32 @@
             if (getSize > n)
                 getSize = n;
 
-            if (fseek(fp, (long) src, SEEK_SET) != 0) {
-                ALOGW("filemove src seek %ld failed, %s",
-                    (long) src, strerror(errno));
+            if (fseeko(fp, src, SEEK_SET) != 0) {
+                ALOGW("filemove src seek %lld failed, %s",
+                    (long long) src, strerror(errno));
                 return UNKNOWN_ERROR;
             }
 
             if (fread(readBuf, 1, getSize, fp) != getSize) {
                 if (feof(fp)) {
-                    ALOGW("fread %zu bytes off=%ld failed, unexpected EOF",
-                        getSize, (long) src);
+                    ALOGW("fread %zu bytes off=%lld failed, unexpected EOF",
+                        getSize, (long long) src);
                 } else {
-                    ALOGW("fread %zu bytes off=%ld failed, %s",
-                        getSize, (long) src, strerror(errno));
+                    ALOGW("fread %zu bytes off=%lld failed, %s",
+                        getSize, (long long) src, strerror(errno));
                 }
                 return UNKNOWN_ERROR;
             }
 
-            if (fseek(fp, (long) dst, SEEK_SET) != 0) {
-                ALOGW("filemove dst seek %ld failed, %s",
-                    (long) dst, strerror(errno));
+            if (fseeko(fp, dst, SEEK_SET) != 0) {
+                ALOGW("filemove dst seek %lld failed, %s",
+                    (long long) dst, strerror(errno));
                 return UNKNOWN_ERROR;
             }
 
             if (fwrite(readBuf, 1, getSize, fp) != getSize) {
-                ALOGW("filemove write %zu off=%ld failed, %s",
-                    getSize, (long) dst, strerror(errno));
+                ALOGW("filemove write %zu off=%lld failed, %s",
+                    getSize, (long long) dst, strerror(errno));
                 return UNKNOWN_ERROR;
             }
 
@@ -1263,10 +1241,10 @@
 
     bool ReadAtOffset(uint8_t* buf, size_t len, off64_t offset) const {
         // Data is usually requested sequentially, so this helps avoid pointless
-        // fseeks every time we perform a read. There's an impedence mismatch
+        // seeks every time we perform a read. There's an impedance mismatch
         // here because the original API was designed around pread and pwrite.
         if (offset != current_offset_) {
-            if (fseek(fp_, offset, SEEK_SET) != 0) {
+            if (fseeko(fp_, offset, SEEK_SET) != 0) {
                 return false;
             }
 
@@ -1298,10 +1276,10 @@
         return NULL;
     }
 
-    fseek(mZipFp, 0, SEEK_SET);
+    fseeko(mZipFp, 0, SEEK_SET);
 
     off_t offset = entry->getFileOffset();
-    if (fseek(mZipFp, offset, SEEK_SET) != 0) {
+    if (fseeko(mZipFp, offset, SEEK_SET) != 0) {
         goto bail;
     }
 
diff --git a/tools/ziptime/ZipEntry.cpp b/tools/ziptime/ZipEntry.cpp
index e7b52ed..c8eb377 100644
--- a/tools/ziptime/ZipEntry.cpp
+++ b/tools/ziptime/ZipEntry.cpp
@@ -43,19 +43,16 @@
  */
 status_t ZipEntry::initAndRewriteFromCDE(FILE* fp)
 {
-    status_t result;
-    long posn;
-
     /* read the CDE */
-    result = mCDE.rewrite(fp);
+    status_t result = mCDE.rewrite(fp);
     if (result != 0) {
         LOG("mCDE.rewrite failed\n");
         return result;
     }
 
     /* using the info in the CDE, go load up the LFH */
-    posn = ftell(fp);
-    if (fseek(fp, mCDE.mLocalHeaderRelOffset, SEEK_SET) != 0) {
+    off_t posn = ftello(fp);
+    if (fseeko(fp, mCDE.mLocalHeaderRelOffset, SEEK_SET) != 0) {
         LOG("local header seek failed (%" PRIu32 ")\n",
             mCDE.mLocalHeaderRelOffset);
         return -1;
@@ -67,7 +64,7 @@
         return result;
     }
 
-    if (fseek(fp, posn, SEEK_SET) != 0)
+    if (fseeko(fp, posn, SEEK_SET) != 0)
         return -1;
 
     return 0;
diff --git a/tools/ziptime/ZipFile.cpp b/tools/ziptime/ZipFile.cpp
index 1d111af..3002a65 100644
--- a/tools/ziptime/ZipFile.cpp
+++ b/tools/ziptime/ZipFile.cpp
@@ -40,8 +40,7 @@
     /* open the file */
     mZipFp = fopen(zipFileName, "r+b");
     if (mZipFp == NULL) {
-        int err = errno;
-        LOG("fopen failed: %d\n", err);
+        LOG("fopen \"%s\" failed: %s\n", zipFileName, strerror(errno));
         return -1;
     }
 
@@ -72,52 +71,39 @@
  */
 status_t ZipFile::rewriteCentralDir(void)
 {
-    status_t result = 0;
-    uint8_t* buf = NULL;
-    off_t fileLength, seekStart;
-    long readAmount;
-    int i;
-
-    fseek(mZipFp, 0, SEEK_END);
-    fileLength = ftell(mZipFp);
+    fseeko(mZipFp, 0, SEEK_END);
+    off_t fileLength = ftello(mZipFp);
     rewind(mZipFp);
 
     /* too small to be a ZIP archive? */
     if (fileLength < EndOfCentralDir::kEOCDLen) {
-        LOG("Length is %ld -- too small\n", (long)fileLength);
-        result = -1;
-        goto bail;
+        LOG("Length is %lld -- too small\n", (long long) fileLength);
+        return -1;
     }
 
-    buf = new uint8_t[EndOfCentralDir::kMaxEOCDSearch];
-    if (buf == NULL) {
-        LOG("Failure allocating %d bytes for EOCD search",
-             EndOfCentralDir::kMaxEOCDSearch);
-        result = -1;
-        goto bail;
-    }
-
+    off_t seekStart;
+    size_t readAmount;
     if (fileLength > EndOfCentralDir::kMaxEOCDSearch) {
         seekStart = fileLength - EndOfCentralDir::kMaxEOCDSearch;
         readAmount = EndOfCentralDir::kMaxEOCDSearch;
     } else {
         seekStart = 0;
-        readAmount = (long) fileLength;
+        readAmount = fileLength;
     }
-    if (fseek(mZipFp, seekStart, SEEK_SET) != 0) {
-        LOG("Failure seeking to end of zip at %ld", (long) seekStart);
-        result = -1;
-        goto bail;
+    if (fseeko(mZipFp, seekStart, SEEK_SET) != 0) {
+        LOG("Failure seeking to end of zip at %lld", (long long) seekStart);
+        return -1;
     }
 
     /* read the last part of the file into the buffer */
-    if (fread(buf, 1, readAmount, mZipFp) != (size_t) readAmount) {
-        LOG("short file? wanted %ld\n", readAmount);
-        result = -1;
-        goto bail;
+    uint8_t buf[EndOfCentralDir::kMaxEOCDSearch];
+    if (fread(buf, 1, readAmount, mZipFp) != readAmount) {
+        LOG("short file? wanted %zu\n", readAmount);
+        return -1;
     }
 
     /* find the end-of-central-dir magic */
+    int i;
     for (i = readAmount - 4; i >= 0; i--) {
         if (buf[i] == 0x50 &&
             ZipEntry::getLongLE(&buf[i]) == EndOfCentralDir::kSignature)
@@ -127,15 +113,14 @@
     }
     if (i < 0) {
         LOG("EOCD not found, not Zip\n");
-        result = -1;
-        goto bail;
+        return -1;
     }
 
     /* extract eocd values */
-    result = mEOCD.readBuf(buf + i, readAmount - i);
+    status_t result = mEOCD.readBuf(buf + i, readAmount - i);
     if (result != 0) {
-        LOG("Failure reading %ld bytes of EOCD values", readAmount - i);
-        goto bail;
+        LOG("Failure reading %zu bytes of EOCD values", readAmount - i);
+        return result;
     }
 
     /*
@@ -152,49 +137,39 @@
      * The only thing we really need right now is the file comment, which
      * we're hoping to preserve.
      */
-    if (fseek(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
+    if (fseeko(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
         LOG("Failure seeking to central dir offset %" PRIu32 "\n",
              mEOCD.mCentralDirOffset);
-        result = -1;
-        goto bail;
+        return -1;
     }
 
     /*
      * Loop through and read the central dir entries.
      */
-    int entry;
-    for (entry = 0; entry < mEOCD.mTotalNumEntries; entry++) {
+    for (int entry = 0; entry < mEOCD.mTotalNumEntries; entry++) {
         ZipEntry* pEntry = new ZipEntry;
-
         result = pEntry->initAndRewriteFromCDE(mZipFp);
+        delete pEntry;
         if (result != 0) {
             LOG("initFromCDE failed\n");
-            delete pEntry;
-            goto bail;
+            return -1;
         }
-
-        delete pEntry;
     }
 
-
     /*
      * If all went well, we should now be back at the EOCD.
      */
     uint8_t checkBuf[4];
     if (fread(checkBuf, 1, 4, mZipFp) != 4) {
         LOG("EOCD check read failed\n");
-        result = -1;
-        goto bail;
+        return -1;
     }
     if (ZipEntry::getLongLE(checkBuf) != EndOfCentralDir::kSignature) {
         LOG("EOCD read check failed\n");
-        result = -1;
-        goto bail;
+        return -1;
     }
 
-bail:
-    delete[] buf;
-    return result;
+    return 0;
 }
 
 /*