Merge "Use Z_BEST_COMPRESSION for compress-package"
diff --git a/Changes.md b/Changes.md
index 8979e30..fc6701d 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,33 @@
 # Build System Changes for Android.mk Writers
 
+## Perform validation of Soong plugins
+
+Each Soong plugin will require manual work to migrate to Bazel. In order to
+minimize the manual work outside of build/soong, we are restricting plugins to
+those that exist today and those in vendor or hardware directories.
+
+If you need to extend the build system via a plugin, please reach out to the
+build team via email android-building@googlegroups.com (external) for any
+questions, or see [go/soong](http://go/soong) (internal).
+
+To skip the validation for specific plugins, set `BUILD_BROKEN_PLUGIN_VALIDATION`
+to a list of plugin names to exempt from the validation.
+
+## Python 2 to 3 migration
+
+The path set when running builds now makes the `python` executable point to python 3,
+whereas on previous versions it pointed to python 2. If you still have python 2 scripts,
+you can change the shebang line to use `python2` explicitly. This only applies for
+scripts run directly from makefiles, or from soong genrules. This behavior can be
+temporarily overridden by setting the `BUILD_BROKEN_PYTHON_IS_PYTHON2` environment
+variable to `true`. It's only an environment variable and not a product config variable
+because product config sometimes calls python code.
+
+In addition, `python_*` soong modules no longer allow python 2. This can be temporarily
+overridden by setting the `BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES` product configuration
+variable to `true`.
+
+Python 2 is slated for complete removal in V.
 ## Stop referencing sysprop_library directly from cc modules
 
 For the migration to Bazel, we are no longer mapping sysprop_library targets
@@ -477,6 +505,24 @@
 
 will copy `bar/baz` into `$DIST_DIR/baz` when `m foo dist` is run.
 
+#### FILE_NAME_TAG  {#FILE_NAME_TAG}
+
+To embed the `BUILD_NUMBER` (or for local builds, `eng.${USER}`), include
+`FILE_NAME_TAG_PLACEHOLDER` in the destination:
+
+``` make
+# you can use dist-for-goals-with-filenametag function
+$(call dist-for-goals-with-filenametag,foo,bar.zip)
+# or use FILE_NAME_TAG_PLACEHOLDER manually
+$(call dist-for-goals,foo,bar.zip:baz-FILE_NAME_TAG_PLACEHOLDER.zip)
+```
+
+This will produce `$DIST_DIR/baz-1234567.zip` on build servers that set
+`BUILD_NUMBER=1234567`, or `$DIST_DIR/baz-eng.builder.zip` for local builds.
+
+If you just want to append `BUILD_NUMBER` to the end of the basename, use
+`dist-for-goals-with-filenametag` instead of `dist-for-goals`.
+
 #### Renames during copy
 
 Instead of specifying just a file, a destination name can be specified,
@@ -818,7 +864,7 @@
 
 ### Stop using clang property
 
-Clang has been deleted from Soong. To fix any build errors, remove the clang
+The clang property has been deleted from Soong. To fix any build errors, remove the clang
 property from affected Android.bp files using bpmodify.
 
 
diff --git a/core/Makefile b/core/Makefile
index e647919..18ba59e 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -17,6 +17,54 @@
 SYSTEM_DLKM_NOTICE_DEPS :=
 
 # -----------------------------------------------------------------
+# Release Config Flags
+
+# Create a summary file of build flags for each partition
+# $(1): build flags json file
+# $(2): flag names
+define generate-partition-build-flag-file
+$(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
+$(eval $(strip $(1)): PRIVATE_FLAG_NAMES := $(strip $(2)))
+$(strip $(1)):
+	mkdir -p $$(dir $$(PRIVATE_OUT))
+	echo '{' > $$(PRIVATE_OUT)
+	echo '"flags": [' >> $$(PRIVATE_OUT)
+	$$(foreach flag, $$(PRIVATE_FLAG_NAMES), \
+		( \
+			printf '  { "name": "%s", "value": "%s", ' \
+					'$$(flag)' \
+					'$$(_ALL_RELEASE_FLAGS.$$(flag).VALUE)' \
+					; \
+			printf '"set": "%s", "default": "%s", "declared": "%s" }' \
+					'$$(_ALL_RELEASE_FLAGS.$$(flag).SET_IN)' \
+					'$$(_ALL_RELEASE_FLAGS.$$(flag).DEFAULT)' \
+					'$$(_ALL_RELEASE_FLAGS.$$(flag).DECLARED_IN)' \
+					; \
+			printf '$$(if $$(filter $$(lastword $$(PRIVATE_FLAG_NAMES)),$$(flag)),,$$(comma))\n' ; \
+		) >> $$(PRIVATE_OUT) \
+	)
+	echo "]" >> $$(PRIVATE_OUT)
+	echo "}" >> $$(PRIVATE_OUT)
+endef
+
+_FLAG_PARTITIONS := product system system_ext vendor
+
+$(foreach partition, $(_FLAG_PARTITIONS), \
+	$(eval BUILD_FLAG_SUMMARIES.$(partition) \
+			:= $(TARGET_OUT_FLAGS)/$(partition)/etc/build_flags.json) \
+	$(eval $(call generate-partition-build-flag-file, \
+				$(BUILD_FLAG_SUMMARIES.$(partition)), \
+				$(_ALL_RELEASE_FLAGS.PARTITIONS.$(partition)) \
+            ) \
+    ) \
+)
+
+# TODO: Remove
+.PHONY: flag-files
+flag-files: $(foreach partition, $(_FLAG_PARTITIONS), \
+		$(TARGET_OUT_FLAGS)/$(partition)/etc/build_flags.json)
+
+# -----------------------------------------------------------------
 # Define rules to copy PRODUCT_COPY_FILES defined by the product.
 # PRODUCT_COPY_FILES contains words like <source file>:<dest file>[:<owner>].
 # <dest file> is relative to $(PRODUCT_OUT), so it should look like,
@@ -474,7 +522,10 @@
     $(eval BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver) := $(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)))) \
   $(if $(filter false,$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver))),\
     $(eval BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver) :=),) \
-  $(call copy-many-files,$(call build-image-kernel-modules,$(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)),$(2),$(3),$(call intermediates-dir-for,PACKAGING,depmod_$(1)$(_sep)$(_kver)),$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),$(4),$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver)),$(_stripped_staging_dir),$(_kver),$(7),$(8)))) \
+  $(eval _files := $(call build-image-kernel-modules,$(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)),$(2),$(3),$(call intermediates-dir-for,PACKAGING,depmod_$(1)$(_sep)$(_kver)),$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),$(4),$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver)),$(_stripped_staging_dir),$(_kver),$(7),$(8))) \
+  $(call copy-many-files,$(_files)) \
+  $(eval _modules := $(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)) ANDROID-GEN ANDROID-GEN ANDROID-GEN ANDROID-GEN) \
+  $(eval KERNEL_MODULE_COPY_FILES += $(join $(addsuffix :,$(_modules)),$(_files)))) \
 $(if $(_kver), \
   $(eval _dir := $(_kver)/), \
   $(eval _dir :=)) \
@@ -487,6 +538,7 @@
   $(eval $(call build-image-kernel-modules-blocklist-file, \
     $(BOARD_$(1)_KERNEL_MODULES_BLOCKLIST_FILE$(_sep)$(_kver)), \
     $(2)/lib/modules/$(_dir)modules.blocklist)) \
+  $(eval ALL_KERNEL_MODULES_BLOCKLIST += $(2)/lib/modules/$(_dir)modules.blocklist) \
   $(2)/lib/modules/$(_dir)modules.blocklist)
 endef
 
@@ -526,6 +578,24 @@
     $(call copy-many-files,$(call module-load-list-copy-paths,$(call intermediates-dir-for,PACKAGING,vendor_charger_module_list$(_sep)$(_kver)),$(BOARD_VENDOR_CHARGER_KERNEL_MODULES$(_sep)$(_kver)),$(BOARD_VENDOR_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),modules.load.charger,$(TARGET_OUT_VENDOR))))
 endef
 
+# $(1): kernel module directory name (top is an out-of-band value for no directory)
+define build-vendor-ramdisk-charger-load
+$(if $(filter top,$(1)),\
+  $(eval _kver :=)$(eval _sep :=),\
+  $(eval _kver := $(1))$(eval _sep :=_))\
+  $(if $(BOARD_VENDOR_RAMDISK_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),\
+    $(call copy-many-files,$(call module-load-list-copy-paths,$(call intermediates-dir-for,PACKAGING,vendor_ramdisk_charger_module_list$(_sep)$(_kver)),$(BOARD_VENDOR_RAMDISK_KERNEL_MODULES$(_sep)$(_kver)),$(BOARD_VENDOR_RAMDISK_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),modules.load.charger,$(TARGET_VENDOR_RAMDISK_OUT))))
+endef
+
+# $(1): kernel module directory name (top is an out-of-band value for no directory)
+define build-vendor-kernel-ramdisk-charger-load
+$(if $(filter top,$(1)),\
+  $(eval _kver :=)$(eval _sep :=),\
+  $(eval _kver := $(1))$(eval _sep :=_))\
+  $(if $(BOARD_VENDOR_KERNEL_RAMDISK_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),\
+    $(call copy-many-files,$(call module-load-list-copy-paths,$(call intermediates-dir-for,PACKAGING,vendor_kernel_ramdisk_charger_module_list$(_sep)$(_kver)),$(BOARD_VENDOR_KERNEL_RAMDISK_KERNEL_MODULES$(_sep)$(_kver)),$(BOARD_VENDOR_KERNEL_RAMDISK_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),modules.load.charger,$(TARGET_VENDOR_KERNEL_RAMDISK_OUT))))
+endef
+
 ifneq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
   # If there is no vendor boot partition, store vendor ramdisk kernel modules in the
   # boot ramdisk.
@@ -591,6 +661,8 @@
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-kernel-ramdisk-recovery-load,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR,$(if $(filter true,$(BOARD_USES_VENDOR_DLKMIMAGE)),$(TARGET_OUT_VENDOR_DLKM),$(TARGET_OUT_VENDOR)),vendor,modules.load,$(VENDOR_STRIPPED_MODULE_STAGING_DIR),$(kmd),$(BOARD_SYSTEM_KERNEL_MODULES),system)) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-charger-load,$(kmd))) \
+  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-charger-load,$(kmd))) \
+  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-kernel-ramdisk-charger-load,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(if $(filter true,$(BOARD_USES_ODM_DLKMIMAGE)),$(TARGET_OUT_ODM_DLKM),$(TARGET_OUT_ODM)),odm,modules.load,,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT_SYSTEM)),system,modules.load,,$(kmd))) \
   $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
@@ -641,7 +713,7 @@
 ifeq ($(TARGET_BUILD_TYPE),debug)
   name := $(name)_debug
 endif
-name := $(name)-apkcerts-$(FILE_NAME_TAG)
+name := $(name)-apkcerts
 intermediates := \
 	$(call intermediates-dir-for,PACKAGING,apkcerts)
 APKCERTS_FILE := $(intermediates)/$(name).txt
@@ -931,6 +1003,42 @@
 
 BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img
 
+
+ifneq ($(BOARD_KERNEL_MODULES_16K),)
+
+TARGET_OUT_RAMDISK_16K := $(PRODUCT_OUT)/ramdisk_16k
+BUILT_RAMDISK_16K_TARGET := $(PRODUCT_OUT)/ramdisk_16k.img
+RAMDISK_16K_STAGING_DIR := $(call intermediates-dir-for,PACKAGING,depmod_ramdisk_16k)
+
+$(BUILT_RAMDISK_16K_TARGET): $(DEPMOD) $(MKBOOTFS)
+$(BUILT_RAMDISK_16K_TARGET): $(call copy-many-files,$(foreach file,$(BOARD_KERNEL_MODULES_16K),$(file):$(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0/$(notdir $(file))))
+	$(DEPMOD) -b $(RAMDISK_16K_STAGING_DIR) 0.0
+	for MODULE in $(BOARD_KERNEL_MODULES_16K); do \
+		basename $$MODULE >> $(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0/modules.load ; \
+	done;
+	mkdir -p $(TARGET_OUT_RAMDISK_16K)/lib
+	rm -rf $(TARGET_OUT_RAMDISK_16K)/lib/modules
+	cp -r $(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0 $(TARGET_OUT_RAMDISK_16K)/lib/modules
+	$(MKBOOTFS) $(TARGET_OUT_RAMDISK_16K) > $@
+
+# Builds a ramdisk using modules defined in BOARD_KERNEL_MODULES_16K
+ramdisk_16k: $(BUILT_RAMDISK_16K_TARGET)
+.PHONY: ramdisk_16k
+
+endif
+
+ifneq ($(BOARD_KERNEL_PATH_16K),)
+BUILT_KERNEL_16K_TARGET := $(PRODUCT_OUT)/kernel_16k
+
+$(eval $(call copy-one-file,$(BOARD_KERNEL_PATH_16K),$(BUILT_KERNEL_16K_TARGET)))
+
+# Copies BOARD_KERNEL_PATH_16K to output directory as is
+kernel_16k: $(BUILT_KERNEL_16K_TARGET)
+.PHONY: kernel_16k
+
+endif
+
+
 ifeq ($(BOARD_RAMDISK_USE_LZ4),true)
 # -l enables the legacy format used by the Linux kernel
 COMPRESSION_COMMAND_DEPS := $(LZ4)
@@ -1218,6 +1326,7 @@
 ifeq ($(BOARD_AVB_ENABLE),true)
 $(INSTALLED_BOOTIMAGE_TARGET): $(INTERNAL_PREBUILT_BOOTIMAGE) $(AVBTOOL) $(BOARD_AVB_BOOT_KEY_PATH)
 	cp $(INTERNAL_PREBUILT_BOOTIMAGE) $@
+	chmod +w $@
 	$(AVBTOOL) add_hash_footer \
 	    --image $@ \
 	    $(call get-partition-size-argument,$(BOARD_BOOTIMAGE_PARTITION_SIZE)) \
@@ -1286,6 +1395,7 @@
 ifeq ($(BOARD_AVB_ENABLE),true)
 $(INSTALLED_INIT_BOOT_IMAGE_TARGET): $(INTERNAL_PREBUILT_INIT_BOOT_IMAGE) $(AVBTOOL) $(BOARD_AVB_INIT_BOOT_KEY_PATH)
 	cp $(INTERNAL_PREBUILT_INIT_BOOT_IMAGE) $@
+	chmod +w $@
 	$(AVBTOOL) add_hash_footer \
 	    --image $@ \
 	    $(call get-partition-size-argument,$(BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE)) \
@@ -1635,6 +1745,21 @@
 target_system_dlkm_notice_file_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE_SYSTEM_DLKM.xml.gz
 installed_system_dlkm_notice_xml_gz := $(TARGET_OUT_SYSTEM_DLKM)/etc/NOTICE.xml.gz
 
+ALL_INSTALLED_NOTICE_FILES := \
+  $(installed_notice_html_or_xml_gz) \
+  $(installed_vendor_notice_xml_gz) \
+  $(installed_product_notice_xml_gz) \
+  $(installed_system_ext_notice_xml_gz) \
+  $(installed_odm_notice_xml_gz) \
+  $(installed_vendor_dlkm_notice_xml_gz) \
+  $(installed_odm_dlkm_notice_xml_gz) \
+  $(installed_system_dlkm_notice_xml_gz) \
+
+# $1 installed file path, e.g. out/target/product/vsoc_x86_64/system_ext/etc/NOTICE.xml.gz
+define is-notice-file
+$(if $(findstring $1,$(ALL_INSTALLED_NOTICE_FILES)),Y)
+endef
+
 # Notice files are copied to TARGET_OUT_NOTICE_FILES as a side-effect of their module
 # being built. A notice xml file must depend on all modules that could potentially
 # install a license file relevant to it.
@@ -2960,7 +3085,7 @@
 
 endif # BUILDING_DEBUG_BOOT_IMAGE || BUILDING_DEBUG_VENDOR_BOOT_IMAGE
 
-
+PARTITION_COMPAT_SYMLINKS :=
 # Creates a compatibility symlink between two partitions, e.g. /system/vendor to /vendor
 # $1: from location (e.g $(TARGET_OUT)/vendor)
 # $2: destination location (e.g. /vendor)
@@ -2978,6 +3103,7 @@
 	ln -sfn $2 $1
 $1: .KATI_SYMLINK_OUTPUTS := $1
 )
+$(eval PARTITION_COMPAT_SYMLINKS += $1)
 $1
 endef
 
@@ -3061,15 +3187,19 @@
 	    --cert $$(PRIVATE_KEY).x509.pem \
 	    --key $$(PRIVATE_KEY).pk8
 
-ALL_DEFAULT_INSTALLED_MODULES += $(1)
+$(1).idsig: $(1)
+
+ALL_DEFAULT_INSTALLED_MODULES += $(1) $(1).idsig
 
 endef  # fsverity-generate-and-install-manifest-apk
 
 $(eval $(call fsverity-generate-and-install-manifest-apk, \
   $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk,system))
+ALL_FSVERITY_BUILD_MANIFEST_APK += $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk.idsig
 ifdef BUILDING_SYSTEM_EXT_IMAGE
   $(eval $(call fsverity-generate-and-install-manifest-apk, \
     $(TARGET_OUT_SYSTEM_EXT)/etc/security/fsverity/BuildManifestSystemExt.apk,system_ext))
+  ALL_FSVERITY_BUILD_MANIFEST_APK += $(TARGET_OUT_SYSTEM_EXT)/etc/security/fsverity/BuildManifestSystemExt.apk $(TARGET_OUT_SYSTEM_EXT)/etc/security/fsverity/BuildManifestSystemExt.apk.idsig
 endif
 
 endif  # PRODUCT_FSVERITY_GENERATE_METADATA
@@ -3084,17 +3214,23 @@
 
 # Create symlink /system/vendor to /vendor if necessary.
 ifdef BOARD_USES_VENDORIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/vendor,/vendor,vendor.img)
+  _vendor_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/vendor,/vendor,vendor.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_vendor_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_vendor_symlink)
 endif
 
 # Create symlink /system/product to /product if necessary.
 ifdef BOARD_USES_PRODUCTIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/product,/product,product.img)
+  _product_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/product,/product,product.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_product_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_product_symlink)
 endif
 
 # Create symlink /system/system_ext to /system_ext if necessary.
 ifdef BOARD_USES_SYSTEM_EXTIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/system_ext,/system_ext,system_ext.img)
+  _systemext_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/system_ext,/system_ext,system_ext.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_systemext_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_systemext_symlink)
 endif
 
 # -----------------------------------------------------------------
@@ -3107,7 +3243,9 @@
 # - /system/lib/modules is a symlink to a directory that stores system DLKMs.
 # - The system_dlkm partition is mounted at /system_dlkm at runtime.
 ifdef BOARD_USES_SYSTEM_DLKMIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/lib/modules,/system_dlkm/lib/modules,system_dlkm.img)
+  _system_dlkm_lib_modules_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/lib/modules,/system_dlkm/lib/modules,system_dlkm.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_system_dlkm_lib_modules_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_system_dlkm_lib_modules_symlink)
 endif
 
 FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
@@ -3131,16 +3269,20 @@
 SYSTEM_LINKER_CONFIG := $(TARGET_OUT)/etc/linker.config.pb
 SYSTEM_LINKER_CONFIG_SOURCE := $(call intermediates-dir-for,ETC,system_linker_config)/system_linker_config
 $(SYSTEM_LINKER_CONFIG): PRIVATE_SYSTEM_LINKER_CONFIG_SOURCE := $(SYSTEM_LINKER_CONFIG_SOURCE)
-$(SYSTEM_LINKER_CONFIG) : $(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE) | conv_linker_config
+$(SYSTEM_LINKER_CONFIG): $(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE) | conv_linker_config
+	@echo Creating linker config: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
 	$(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $(PRIVATE_SYSTEM_LINKER_CONFIG_SOURCE) \
-	  --output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)"
+		--output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)"
 	$(HOST_OUT_EXECUTABLES)/conv_linker_config append --source $@ --output $@ --key requireLibs \
-	 --value "$(foreach lib,$(LLNDK_MOVED_TO_APEX_LIBRARIES), $(lib).so)"
+		--value "$(foreach lib,$(LLNDK_MOVED_TO_APEX_LIBRARIES), $(lib).so)"
 
 $(call declare-1p-target,$(SYSTEM_LINKER_CONFIG),)
 $(call declare-license-deps,$(SYSTEM_LINKER_CONFIG),$(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE))
 
 FULL_SYSTEMIMAGE_DEPS += $(SYSTEM_LINKER_CONFIG)
+ALL_DEFAULT_INSTALLED_MODULES += $(SYSTEM_LINKER_CONFIG)
 
 # installed file list
 # Depending on anything that $(BUILT_SYSTEMIMAGE) depends on.
@@ -3250,8 +3392,8 @@
 
 endif # BUILDING_SYSTEM_IMAGE
 
-.PHONY: sync syncsys
-sync syncsys: $(INTERNAL_SYSTEMIMAGE_FILES)
+.PHONY: sync syncsys sync_system
+sync syncsys sync_system: $(INTERNAL_SYSTEMIMAGE_FILES)
 
 # -----------------------------------------------------------------
 # Old PDK fusion targets
@@ -3492,7 +3634,9 @@
 
 # Create symlink /vendor/odm to /odm if necessary.
 ifdef BOARD_USES_ODMIMAGE
-  INTERNAL_VENDORIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/odm,/odm,odm.img)
+  _odm_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/odm,/odm,odm.img)
+  INTERNAL_VENDORIMAGE_FILES += $(_odm_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_odm_symlink)
 endif
 
 # Create symlinks for vendor_dlkm on devices with a vendor_dlkm partition:
@@ -3510,20 +3654,26 @@
 # The vendor DLKMs and other vendor_dlkm files must not be accessed using other paths because they
 # are not guaranteed to exist on all devices.
 ifdef BOARD_USES_VENDOR_DLKMIMAGE
-  INTERNAL_VENDORIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/lib/modules,/vendor_dlkm/lib/modules,vendor_dlkm.img)
+  _vendor_dlkm_lib_modules_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/lib/modules,/vendor_dlkm/lib/modules,vendor_dlkm.img)
+  INTERNAL_VENDORIMAGE_FILES += $(_vendor_dlkm_lib_modules_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_vendor_dlkm_lib_modules_symlink)
 endif
 
-# Install vendor/etc/linker.config.pb when PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS is set
-ifneq ($(strip $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS)),)
+# Install vendor/etc/linker.config.pb with PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS and STUB_LIBRARIES
 vendor_linker_config_file := $(TARGET_OUT_VENDOR)/etc/linker.config.pb
 $(vendor_linker_config_file): private_linker_config_fragments := $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS)
-$(vendor_linker_config_file): $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS) | $(HOST_OUT_EXECUTABLES)/conv_linker_config
+$(vendor_linker_config_file): $(INTERNAL_VENDORIMAGE_FILES) $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS) | $(HOST_OUT_EXECUTABLES)/conv_linker_config
+	@echo Creating linker config: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
 	$(HOST_OUT_EXECUTABLES)/conv_linker_config proto \
 		--source $(call normalize-path-list,$(private_linker_config_fragments)) \
 		--output $@
+	$(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $@ \
+		--output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT_VENDOR)"
 $(call define declare-0p-target,$(vendor_linker_config_file),)
 INTERNAL_VENDORIMAGE_FILES += $(vendor_linker_config_file)
-endif
+ALL_DEFAULT_INSTALLED_MODULES += $(vendor_linker_config_file)
 
 INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
 INSTALLED_FILES_JSON_VENDOR := $(INSTALLED_FILES_FILE_VENDOR:.txt=.json)
@@ -3571,7 +3721,8 @@
 vendorimage-nodeps vnod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-vendorimage-target)
 
-sync: $(INTERNAL_VENDORIMAGE_FILES)
+.PHONY: sync_vendor
+sync sync_vendor: $(INTERNAL_VENDORIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_VENDORIMAGE
 INSTALLED_VENDORIMAGE_TARGET := $(PRODUCT_OUT)/vendor.img
@@ -3635,7 +3786,8 @@
 productimage-nodeps pnod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-productimage-target)
 
-sync: $(INTERNAL_PRODUCTIMAGE_FILES)
+.PHONY: sync_product
+sync sync_product: $(INTERNAL_PRODUCTIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_PRODUCTIMAGE
 INSTALLED_PRODUCTIMAGE_TARGET := $(PRODUCT_OUT)/product.img
@@ -3697,7 +3849,8 @@
 systemextimage-nodeps senod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-system_extimage-target)
 
-sync: $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+.PHONY: sync_system_ext
+sync sync_system_ext: $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_SYSTEM_EXTIMAGE
 INSTALLED_SYSTEM_EXTIMAGE_TARGET := $(PRODUCT_OUT)/system_ext.img
@@ -3727,7 +3880,9 @@
 # The odm DLKMs and other odm_dlkm files must not be accessed using other paths because they
 # are not guaranteed to exist on all devices.
 ifdef BOARD_USES_ODM_DLKMIMAGE
-  INTERNAL_ODMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_ODM)/lib/modules,/odm_dlkm/lib/modules,odm_dlkm.img)
+  _odm_dlkm_lib_modules_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT_ODM)/lib/modules,/odm_dlkm/lib/modules,odm_dlkm.img)
+  INTERNAL_ODMIMAGE_FILES += $(_odm_dlkm_lib_modules_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_odm_dlkm_lib_modules_symlink)
 endif
 
 INSTALLED_FILES_FILE_ODM := $(PRODUCT_OUT)/installed-files-odm.txt
@@ -3776,7 +3931,8 @@
 odmimage-nodeps onod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-odmimage-target)
 
-sync: $(INTERNAL_ODMIMAGE_FILES)
+.PHONY: sync_odm
+sync sync_odm: $(INTERNAL_ODMIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_ODMIMAGE
 INSTALLED_ODMIMAGE_TARGET := $(PRODUCT_OUT)/odm.img
@@ -3837,7 +3993,8 @@
 vendor_dlkmimage-nodeps vdnod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-vendor_dlkmimage-target)
 
-sync: $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+.PHONY: sync_vendor_dlkm
+sync sync_vendor_dlkm: $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
 INSTALLED_VENDOR_DLKMIMAGE_TARGET := $(PRODUCT_OUT)/vendor_dlkm.img
@@ -3898,7 +4055,8 @@
 odm_dlkmimage-nodeps odnod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-odm_dlkmimage-target)
 
-sync: $(INTERNAL_ODM_DLKMIMAGE_FILES)
+.PHONY: sync_odm_dlkm
+sync sync_odm_dlkm: $(INTERNAL_ODM_DLKMIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
 INSTALLED_ODM_DLKMIMAGE_TARGET := $(PRODUCT_OUT)/odm_dlkm.img
@@ -3961,7 +4119,8 @@
 system_dlkmimage-nodeps sdnod: | $(INTERNAL_USERIMAGES_DEPS)
 	$(build-system_dlkmimage-target)
 
-sync: $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
+.PHONY: sync_system_dlkm
+sync sync_system_dlkm: $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
 
 else ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
 INSTALLED_SYSTEM_DLKMIMAGE_TARGET := $(PRODUCT_OUT)/system_dlkm.img
@@ -3976,6 +4135,7 @@
 ifeq ($(BOARD_AVB_ENABLE),true)
 $(INSTALLED_DTBOIMAGE_TARGET): $(BOARD_PREBUILT_DTBOIMAGE) $(AVBTOOL) $(BOARD_AVB_DTBO_KEY_PATH)
 	cp $(BOARD_PREBUILT_DTBOIMAGE) $@
+	chmod +w $@
 	$(AVBTOOL) add_hash_footer \
 	    --image $@ \
 	    $(call get-partition-size-argument,$(BOARD_DTBOIMG_PARTITION_SIZE)) \
@@ -4723,7 +4883,7 @@
 check_vintf_all_deps += $(check_vintf_system_log)
 $(check_vintf_system_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_system_deps)
 	@( $< --check-one --dirmap /system:$(TARGET_OUT) > $@ 2>&1 ) || ( cat $@ && exit 1 )
-$(call declare-0p-target,$(check_vintf_system_log))
+$(call declare-1p-target,$(check_vintf_system_log))
 check_vintf_system_log :=
 
 # -- Check framework manifest against frozen manifests for GSI targets. They need to be compatible.
@@ -4735,7 +4895,7 @@
 	@( $< --check --dirmap /system:$(TARGET_OUT) \
 	  $(VINTF_FRAMEWORK_MANIFEST_FROZEN_DIR) > $@ 2>&1 ) || ( cat $@ && exit 1 )
 
-$(call declare-0p-target,$(vintffm_log))
+$(call declare-1p-target,$(vintffm_log))
 
 endif # check_vintf_system_deps
 check_vintf_system_deps :=
@@ -4759,7 +4919,7 @@
 	  ( $< --check-one --dirmap /vendor:$(TARGET_OUT_VENDOR) --dirmap /apex:$(APEX_OUT) \
 	       --property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
 	       > $@ 2>&1 ) || ( cat $@ && exit 1 ); )
-$(call declare-0p-target,$(check_vintf_vendor_log))
+$(call declare-1p-target,$(check_vintf_vendor_log))
 check_vintf_vendor_log :=
 endif # check_vintf_vendor_deps
 check_vintf_vendor_deps :=
@@ -4781,8 +4941,8 @@
 $(BUILT_KERNEL_VERSION_FILE):
 	echo $(BOARD_KERNEL_VERSION) > $@
 
-$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
-$(call declare-0p-target,$(BUILT_KERNEL_VERSION_FILE))
+$(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
+$(call declare-license-metadata,$(BUILT_KERNEL_VERSION_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
 
 my_board_extracted_kernel := true
 endif # BOARD_KERNEL_VERSION
@@ -4807,7 +4967,7 @@
 	  --output-configs $@ \
 	  --output-release $(BUILT_KERNEL_VERSION_FILE)
 
-$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+$(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
 
 my_board_extracted_kernel := true
 endif # INSTALLED_KERNEL_TARGET
@@ -4828,7 +4988,7 @@
 	  --output-configs $@ \
 	  --output-release $(BUILT_KERNEL_VERSION_FILE)
 
-$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+$(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
 
 my_board_extracted_kernel := true
 endif # INSTALLED_BOOTIMAGE_TARGET
@@ -4920,7 +5080,7 @@
 	       --property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
 	       >> $@ 2>&1 ) || (cat $@ && exit 1); ))
 
-$(call declare-0p-target,$(check_vintf_compatible_log))
+$(call declare-1p-target,$(check_vintf_compatible_log))
 
 check_vintf_compatible_log :=
 check_vintf_compatible_args :=
@@ -4985,7 +5145,7 @@
 	  $(call intermediates-dir-for,PACKAGING,check-all-partition-sizes)/misc_info.txt, \
 	  $@)
 
-$(call declare-0p-target,$(check_all_partition_sizes_log))
+$(call declare-1p-target,$(check_all_partition_sizes_log))
 
 .PHONY: check-all-partition-sizes
 check-all-partition-sizes: $(check_all_partition_sizes_log)
@@ -5073,6 +5233,7 @@
   check_target_files_signatures \
   check_target_files_vintf \
   checkvintf \
+  create_brick_ota \
   delta_generator \
   e2fsck \
   e2fsdroid \
@@ -5094,6 +5255,7 @@
   lz4 \
   make_f2fs \
   make_f2fs_casefold \
+  merge_ota \
   merge_target_files \
   minigzip \
   mk_combined_img \
@@ -5140,6 +5302,7 @@
   debugfs_static \
   dump_apex_info \
   fsck.erofs \
+  make_erofs \
   merge_zips \
   resize2fs \
   soong_zip \
@@ -5214,9 +5377,72 @@
 .PHONY: otatools-package
 otatools-package: $(BUILT_OTATOOLS_PACKAGE)
 
+$(call dist-for-goals, otatools-package, \
+  $(BUILT_OTATOOLS_PACKAGE) \
+)
+
 endif # build_otatools_package
 
 # -----------------------------------------------------------------
+#  fastboot-info.txt
+FASTBOOT_INFO_VERSION = 1
+
+INSTALLED_FASTBOOT_INFO_TARGET := $(PRODUCT_OUT)/fastboot-info.txt
+
+$(INSTALLED_FASTBOOT_INFO_TARGET):
+	rm -f $@
+	$(call pretty,"Target fastboot-info.txt: $@")
+	$(hide) echo "# fastboot-info for $(TARGET_PRODUCT)" >> $@
+	$(hide) echo "version $(FASTBOOT_INFO_VERSION)" >> $@
+ifneq ($(INSTALLED_BOOTIMAGE_TARGET),)
+	$(hide) echo "flash boot" >> $@
+endif
+ifneq ($(INSTALLED_INIT_BOOT_IMAGE_TARGET),)
+	$(hide) echo "flash init_boot" >> $@
+endif
+ifdef BOARD_PREBUILT_DTBOIMAGE
+	$(hide) echo "flash dtbo" >> $@
+endif
+ifneq ($(INSTALLED_VENDOR_KERNEL_BOOTIMAGE_TARGET),)
+	$(hide) echo "flash vendor_kernel_boot" >> $@
+endif
+ifeq ($(BOARD_USES_PVMFWIMAGE),true)
+	$(hide) echo "flash pvmfw" >> $@
+endif
+ifeq ($(BOARD_AVB_ENABLE),true)
+ifeq ($(BUILDING_VBMETA_IMAGE),true)
+	$(hide) echo "flash --apply-vbmeta vbmeta" >> $@
+endif
+ifneq (,$(strip $(BOARD_AVB_VBMETA_SYSTEM)))
+	$(hide) echo "flash vbmeta_system" >> $@
+endif
+ifneq (,$(strip $(BOARD_AVB_VBMETA_VENDOR)))
+	$(hide) echo "flash --apply-vbmeta vbmeta_vendor" >> $@
+endif
+ifneq ($(INSTALLED_VENDOR_BOOTIMAGE_TARGET),)
+	$(hide) echo "flash vendor_boot" >> $@
+endif
+ifneq (,$(strip $(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS)))
+	$(hide) $(foreach partition,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS), \
+	  echo "flash vbmeta_$(partition)" >> $@;)
+endif
+endif # BOARD_AVB_ENABLE
+	$(hide) echo "reboot fastboot" >> $@
+	$(hide) echo "update-super" >> $@
+	$(hide) $(foreach partition,$(BOARD_SUPER_PARTITION_PARTITION_LIST), \
+	  echo "flash $(partition)" >> $@;)
+ifdef BUILDING_SYSTEM_OTHER_IMAGE
+	$(hide) echo "flash --slot-other system system_other.img" >> $@
+endif
+ifdef BUILDING_CACHE_IMAGE
+	$(hide) echo "if-wipe erase cache" >> $@
+endif
+	$(hide) echo "if-wipe erase userdata" >> $@
+ifeq ($(BOARD_USES_METADATA_PARTITION),true)
+	$(hide) echo "if-wipe erase metadata" >> $@
+endif
+
+# -----------------------------------------------------------------
 #  misc_info.txt
 
 INSTALLED_MISC_INFO_TARGET := $(PRODUCT_OUT)/misc_info.txt
@@ -5466,6 +5692,13 @@
 	$(hide) echo "target_flatten_apex=false" >> $@
 endif
 
+$(call declare-0p-target,$(INSTALLED_FASTBOOT_INFO_TARGET))
+
+.PHONY: fastboot_info
+fastboot_info: $(INSTALLED_FASTBOOT_INFO_TARGET)
+
+droidcore-unbundled: $(INSTALLED_FASTBOOT_INFO_TARGET)
+
 $(call declare-0p-target,$(INSTALLED_MISC_INFO_TARGET))
 
 .PHONY: misc_info
@@ -5482,13 +5715,15 @@
 ifeq ($(TARGET_BUILD_TYPE),debug)
   name := $(name)_debug
 endif
-name := $(name)-target_files-$(FILE_NAME_TAG)
+name := $(name)-target_files
 
 intermediates := $(call intermediates-dir-for,PACKAGING,target_files)
+BUILT_TARGET_FILES_DIR := $(intermediates)/$(name).zip.list
 BUILT_TARGET_FILES_PACKAGE := $(intermediates)/$(name).zip
-$(BUILT_TARGET_FILES_PACKAGE): intermediates := $(intermediates)
-$(BUILT_TARGET_FILES_PACKAGE): \
-	    zip_root := $(intermediates)/$(name)
+$(BUILT_TARGET_FILES_PACKAGE): zip_root := $(intermediates)/$(name)
+$(BUILT_TARGET_FILES_DIR): zip_root := $(intermediates)/$(name)
+$(BUILT_TARGET_FILES_DIR): intermediates := $(intermediates)
+
 
 # $(1): Directory to copy
 # $(2): Location to copy it to
@@ -5508,10 +5743,10 @@
     $(call intermediates-dir-for,EXECUTABLES,updater)/updater
 endif
 
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_OTA_TOOLS := $(built_ota_tools)
+$(BUILT_TARGET_FILES_DIR): PRIVATE_OTA_TOOLS := $(built_ota_tools)
 
 tool_extension := $(wildcard $(tool_extensions)/releasetools.py)
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_TOOL_EXTENSION := $(tool_extension)
+$(BUILT_TARGET_FILES_DIR): PRIVATE_TOOL_EXTENSION := $(tool_extension)
 
 updater_dep :=
 ifeq ($(AB_OTA_UPDATER),true)
@@ -5527,23 +5762,23 @@
 updater_dep += $(built_ota_tools)
 endif
 
-$(BUILT_TARGET_FILES_PACKAGE): $(updater_dep)
+$(BUILT_TARGET_FILES_DIR): $(updater_dep)
 
 # If we are using recovery as boot, output recovery files to BOOT/.
 # If we are moving recovery resources to vendor_boot, output recovery files to VENDOR_BOOT/.
 ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := BOOT
+$(BUILT_TARGET_FILES_DIR): PRIVATE_RECOVERY_OUT := BOOT
 else ifeq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true)
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := VENDOR_BOOT
+$(BUILT_TARGET_FILES_DIR): PRIVATE_RECOVERY_OUT := VENDOR_BOOT
 else
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := RECOVERY
+$(BUILT_TARGET_FILES_DIR): PRIVATE_RECOVERY_OUT := RECOVERY
 endif
 
 ifeq ($(AB_OTA_UPDATER),true)
   ifdef OSRELEASED_DIRECTORY
-    $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_id
-    $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version
-    $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version
+    $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_id
+    $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version
+    $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version
   endif
 
   # Not checking in board_config.mk, since AB_OTA_PARTITIONS may be updated in Android.mk (e.g. to
@@ -5645,34 +5880,48 @@
     echo "virtual_ab_compression_method=$(PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD)" >> $(1))
   $(if $(filter true,$(PRODUCT_VIRTUAL_AB_OTA_RETROFIT)), \
     echo "virtual_ab_retrofit=true" >> $(1))
+  $(if $(PRODUCT_VIRTUAL_AB_COW_VERSION), \
+    echo "virtual_ab_cow_version=$(PRODUCT_VIRTUAL_AB_COW_VERSION)" >> $(1))
+endef
+
+# Copy an image file to a directory and generate a block list map file from the image.
+# $(1): path of the image file
+# $(2): target out directory
+# $(3): name of the map file. skip generating map file if empty
+define copy-image-and-generate-map
+  mkdir -p $(2)
+  cp $(1) $(2)
+  $(if $(3),$(HOST_OUT_EXECUTABLES)/map_file_generator $(1) $(2)/$(3))
 endef
 
 # By conditionally including the dependency of the target files package on the
 # full system image deps, we speed up builds that do not build the system
 # image.
 ifdef BUILDING_SYSTEM_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(FULL_SYSTEMIMAGE_DEPS)
+  $(BUILT_TARGET_FILES_DIR): $(FULL_SYSTEMIMAGE_DEPS)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_SYSTEMIMAGE)
 else
   # releasetools may need the system build.prop even when building a
   # system-image-less product.
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BUILD_PROP_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_BUILD_PROP_TARGET)
 endif
 
 ifdef BUILDING_USERDATA_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_USERDATAIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_USERDATAIMAGE_FILES)
 endif
 
 ifdef BUILDING_SYSTEM_OTHER_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEMOTHERIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEMOTHERIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_SYSTEMOTHERIMAGE_TARGET)
 endif
 
 ifdef BUILDING_VENDOR_BOOT_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FILES)
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_RAMDISK_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
   # The vendor ramdisk may be built from the recovery ramdisk.
   ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
-    $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
+    $(BUILT_TARGET_FILES_DIR): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
   endif
 endif
 
@@ -5682,11 +5931,11 @@
   # commands in build-recoveryimage-target, which would touch the files under
   # TARGET_RECOVERY_OUT and race with packaging target-files.zip.
   ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-    $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTIMAGE_TARGET)
+    $(BUILT_TARGET_FILES_DIR): $(INSTALLED_BOOTIMAGE_TARGET)
   else
-    $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_RECOVERYIMAGE_TARGET)
+    $(BUILT_TARGET_FILES_DIR): $(INSTALLED_RECOVERYIMAGE_TARGET)
   endif
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERYIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_RECOVERYIMAGE_FILES)
 endif
 
 # Conditionally depend on the image files if the image is being built so the
@@ -5694,68 +5943,75 @@
 # if it is coming from a prebuilt.
 
 ifdef BUILDING_VENDOR_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDORIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDORIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_VENDORIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_VENDORIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDORIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_VENDORIMAGE_TARGET)
 endif
 
 ifdef BUILDING_PRODUCT_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_PRODUCTIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_PRODUCTIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_PRODUCTIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_PRODUCTIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_PRODUCTIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_PRODUCTIMAGE_TARGET)
 endif
 
 ifdef BUILDING_SYSTEM_EXT_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_SYSTEM_EXTIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_SYSTEM_EXTIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
 endif
 
 ifneq (,$(BUILDING_BOOT_IMAGE)$(BUILDING_INIT_BOOT_IMAGE))
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RAMDISK_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_RAMDISK_FILES)
 endif  # BUILDING_BOOT_IMAGE != "" || BUILDING_INIT_BOOT_IMAGE != ""
 
 ifneq (,$(INTERNAL_PREBUILT_BOOTIMAGE) $(filter true,$(BOARD_COPY_BOOT_IMAGE_TO_TARGET_FILES)))
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_BOOTIMAGE_TARGET)
 endif
 
 ifdef BUILDING_ODM_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_ODMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_ODMIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_ODMIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODMIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_ODMIMAGE_TARGET)
 endif
 
 ifdef BUILDING_VENDOR_DLKM_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_VENDOR_DLKMIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
 endif
 
 ifdef BUILDING_ODM_DLKM_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODM_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_ODM_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_ODM_DLKMIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
 endif
 
 ifdef BUILDING_SYSTEM_DLKM_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(BUILT_SYSTEM_DLKMIMAGE_TARGET)
 else ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
 endif
 
 ifeq ($(BUILD_QEMU_IMAGES),true)
   MK_VBMETA_BOOT_KERNEL_CMDLINE_SH := device/generic/goldfish/tools/mk_vbmeta_boot_params.sh
-  $(BUILT_TARGET_FILES_PACKAGE): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
+  $(BUILT_TARGET_FILES_DIR): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
 endif
 
 ifdef BOARD_PREBUILT_BOOTLOADER
-$(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTLOADER_MODULE)
+$(BUILT_TARGET_FILES_DIR): $(INSTALLED_BOOTLOADER_MODULE)
 droidcore-unbundled: $(INSTALLED_BOOTLOADER_MODULE)
 endif
 
 # Depending on the various images guarantees that the underlying
 # directories are up-to-date.
-$(BUILT_TARGET_FILES_PACKAGE): \
+$(BUILT_TARGET_FILES_DIR): \
 	    $(INSTALLED_RADIOIMAGE_TARGET) \
 	    $(INSTALLED_RECOVERYIMAGE_TARGET) \
 	    $(INSTALLED_CACHEIMAGE_TARGET) \
@@ -5769,6 +6025,8 @@
 	    $(INSTALLED_RAMDISK_TARGET) \
 	    $(INSTALLED_DTBIMAGE_TARGET) \
 	    $(INSTALLED_2NDBOOTLOADER_TARGET) \
+	    $(BUILT_RAMDISK_16K_TARGET) \
+	    $(BUILT_KERNEL_16K_TARGET) \
 	    $(BOARD_PREBUILT_DTBOIMAGE) \
 	    $(BOARD_PREBUILT_RECOVERY_DTBOIMAGE) \
 	    $(BOARD_RECOVERY_ACPIO) \
@@ -5783,16 +6041,18 @@
 	    $(LPMAKE) \
 	    $(SELINUX_FC) \
 	    $(INSTALLED_MISC_INFO_TARGET) \
+	    $(INSTALLED_FASTBOOT_INFO_TARGET) \
 	    $(APKCERTS_FILE) \
 	    $(SOONG_APEX_KEYS_FILE) \
 	    $(SOONG_ZIP) \
 	    $(HOST_OUT_EXECUTABLES)/fs_config \
+	    $(HOST_OUT_EXECUTABLES)/map_file_generator \
 	    $(ADD_IMG_TO_TARGET_FILES) \
 	    $(MAKE_RECOVERY_PATCH) \
 	    $(BUILT_KERNEL_CONFIGS_FILE) \
 	    $(BUILT_KERNEL_VERSION_FILE) \
 	    | $(ACP)
-	@echo "Package target files: $@"
+	@echo "Building target files: $@"
 	$(hide) rm -rf $@ $@.list $(zip_root)
 	$(hide) mkdir -p $(dir $@) $(zip_root)
 ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))$(filter true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
@@ -6001,6 +6261,9 @@
 	$(hide) echo "$(PRODUCT_OTA_PUBLIC_KEYS)" > $(zip_root)/META/otakeys.txt
 	$(hide) cp $(SELINUX_FC) $(zip_root)/META/file_contexts.bin
 	$(hide) cp $(INSTALLED_MISC_INFO_TARGET) $(zip_root)/META/misc_info.txt
+ifneq ($(INSTALLED_FASTBOOT_INFO_TARGET),)
+	$(hide) cp $(INSTALLED_FASTBOOT_INFO_TARGET) $(zip_root)/META/fastboot-info.txt
+endif
 ifneq ($(PRODUCT_SYSTEM_BASE_FS_PATH),)
 	$(hide) cp $(PRODUCT_SYSTEM_BASE_FS_PATH) \
 	  $(zip_root)/META/$(notdir $(PRODUCT_SYSTEM_BASE_FS_PATH))
@@ -6108,6 +6371,14 @@
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_DTBOIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
 endif # BOARD_PREBUILT_DTBOIMAGE
+ifdef BUILT_KERNEL_16K_TARGET
+	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
+	$(hide) cp $(BUILT_KERNEL_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/
+endif # BUILT_KERNEL_16K_TARGET
+ifdef BUILT_RAMDISK_16K_TARGET
+	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
+	$(hide) cp $(BUILT_RAMDISK_16K_TARGET) $(zip_root)/PREBUILT_IMAGES/
+endif # BUILT_RAMDISK_16K_TARGET
 ifeq ($(BOARD_USES_PVMFWIMAGE),true)
 	$(hide) mkdir -p $(zip_root)/PREBUILT_IMAGES
 	$(hide) cp $(INSTALLED_PVMFWIMAGE_TARGET) $(zip_root)/PREBUILT_IMAGES/
@@ -6132,27 +6403,35 @@
 	@# Run fs_config on all the system, vendor, boot ramdisk,
 	@# and recovery ramdisk files in the zip, and save the output
 ifdef BUILDING_SYSTEM_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_SYSTEMIMAGE),$(zip_root)/IMAGES,system.map)
 	$(hide) $(call fs_config,$(zip_root)/SYSTEM,system/) > $(zip_root)/META/filesystem_config.txt
 endif
 ifdef BUILDING_VENDOR_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_VENDORIMAGE_TARGET),$(zip_root)/IMAGES,vendor.map)
 	$(hide) $(call fs_config,$(zip_root)/VENDOR,vendor/) > $(zip_root)/META/vendor_filesystem_config.txt
 endif
 ifdef BUILDING_PRODUCT_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_PRODUCTIMAGE_TARGET),$(zip_root)/IMAGES,product.map)
 	$(hide) $(call fs_config,$(zip_root)/PRODUCT,product/) > $(zip_root)/META/product_filesystem_config.txt
 endif
 ifdef BUILDING_SYSTEM_EXT_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_SYSTEM_EXTIMAGE_TARGET),$(zip_root)/IMAGES,system_ext.map)
 	$(hide) $(call fs_config,$(zip_root)/SYSTEM_EXT,system_ext/) > $(zip_root)/META/system_ext_filesystem_config.txt
 endif
 ifdef BUILDING_ODM_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_ODMIMAGE_TARGET),$(zip_root)/IMAGES,odm.map)
 	$(hide) $(call fs_config,$(zip_root)/ODM,odm/) > $(zip_root)/META/odm_filesystem_config.txt
 endif
 ifdef BUILDING_VENDOR_DLKM_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_VENDOR_DLKMIMAGE_TARGET),$(zip_root)/IMAGES,vendor_dlkm.map)
 	$(hide) $(call fs_config,$(zip_root)/VENDOR_DLKM,vendor_dlkm/) > $(zip_root)/META/vendor_dlkm_filesystem_config.txt
 endif
 ifdef BUILDING_ODM_DLKM_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_ODM_DLKMIMAGE_TARGET),$(zip_root)/IMAGES,odm_dlkm.map)
 	$(hide) $(call fs_config,$(zip_root)/ODM_DLKM,odm_dlkm/) > $(zip_root)/META/odm_dlkm_filesystem_config.txt
 endif
 ifdef BUILDING_SYSTEM_DLKM_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_SYSTEM_DLKMIMAGE_TARGET),$(zip_root)/IMAGES,system_dlkm.map)
 	$(hide) $(call fs_config,$(zip_root)/SYSTEM_DLKM,system_dlkm/) > $(zip_root)/META/system_dlkm_filesystem_config.txt
 endif
 	@# ROOT always contains the files for the root under normal boot.
@@ -6174,6 +6453,7 @@
 	$(hide) $(call fs_config,$(zip_root)/RECOVERY/RAMDISK,) > $(zip_root)/META/recovery_filesystem_config.txt
 endif
 ifdef BUILDING_SYSTEM_OTHER_IMAGE
+	$(hide) $(call copy-image-and-generate-map,$(BUILT_SYSTEMOTHERIMAGE_TARGET),$(zip_root)/IMAGES)
 	$(hide) $(call fs_config,$(zip_root)/SYSTEM_OTHER,system/) > $(zip_root)/META/system_other_filesystem_config.txt
 endif
 	@# Metadata for compatibility verification.
@@ -6195,13 +6475,19 @@
 endif
 	@# Zip everything up, preserving symlinks and placing META/ files first to
 	@# help early validation of the .zip file while uploading it.
-	$(hide) find $(zip_root)/META | sort >$@.list
-	$(hide) find $(zip_root) -path $(zip_root)/META -prune -o -print | sort >>$@.list
+	$(hide) find $(zip_root)/META | sort >$@
+	$(hide) find $(zip_root) -path $(zip_root)/META -prune -o -print | sort >>$@
+
+$(BUILT_TARGET_FILES_PACKAGE): $(BUILT_TARGET_FILES_DIR)
+	@echo "Packaging target files: $@"
 	$(hide) $(SOONG_ZIP) -d -o $@ -C $(zip_root) -r $@.list
 
 .PHONY: target-files-package
 target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
 
+.PHONY: target-files-dir
+target-files-dir: $(BUILT_TARGET_FILES_DIR)
+
 $(call declare-1p-container,$(BUILT_TARGET_FILES_PACKAGE),)
 $(call declare-container-license-deps,$(BUILT_TARGET_FILES_PACKAGE), $(INSTALLED_RADIOIMAGE_TARGET) \
             $(INSTALLED_RECOVERYIMAGE_TARGET) \
@@ -6229,15 +6515,17 @@
             $(LPMAKE) \
             $(SELINUX_FC) \
             $(INSTALLED_MISC_INFO_TARGET) \
+            $(INSTALLED_FASTBOOT_INFO_TARGET) \
             $(APKCERTS_FILE) \
             $(SOONG_APEX_KEYS_FILE) \
             $(HOST_OUT_EXECUTABLES)/fs_config \
+            $(HOST_OUT_EXECUTABLES)/map_file_generator \
             $(ADD_IMG_TO_TARGET_FILES) \
             $(MAKE_RECOVERY_PATCH) \
             $(BUILT_KERNEL_CONFIGS_FILE) \
             $(BUILT_KERNEL_VERSION_FILE),$(BUILT_TARGET_FILES_PACKAGE):)
 
-$(call dist-for-goals, target-files-package, $(BUILT_TARGET_FILES_PACKAGE))
+$(call dist-for-goals-with-filenametag, target-files-package, $(BUILT_TARGET_FILES_PACKAGE))
 
 # -----------------------------------------------------------------
 # NDK Sysroot Package
@@ -6262,18 +6550,17 @@
 PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$(dir $(ZIP2ZIP)):$$PATH \
     $(OTA_FROM_TARGET_FILES) \
         --verbose \
-        --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
         --path $(HOST_OUT) \
         $(if $(OEM_OTA_CONFIG), --oem_settings $(OEM_OTA_CONFIG)) \
         $(2) \
-        $(BUILT_TARGET_FILES_PACKAGE) $(1)
+        $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) $(1)
 endef
 
 product_name := $(TARGET_PRODUCT)
 ifeq ($(TARGET_BUILD_TYPE),debug)
   product_name := $(product_name)_debug
 endif
-name := $(product_name)-ota-$(FILE_NAME_TAG)
+name := $(product_name)-ota
 
 INTERNAL_OTA_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
 INTERNAL_OTA_METADATA := $(PRODUCT_OUT)/ota_metadata
@@ -6282,7 +6569,7 @@
 
 $(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
 $(INTERNAL_OTA_PACKAGE_TARGET): .KATI_IMPLICIT_OUTPUTS := $(INTERNAL_OTA_METADATA)
-$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
+$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_DIR) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
 	@echo "Package OTA: $@"
 	$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --output_metadata_path $(INTERNAL_OTA_METADATA))
 
@@ -6293,7 +6580,7 @@
 otapackage: $(INTERNAL_OTA_PACKAGE_TARGET)
 
 ifeq ($(BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE),true)
-name := $(product_name)-ota-retrofit-$(FILE_NAME_TAG)
+name := $(product_name)-ota-retrofit
 
 INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
 $(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
@@ -6314,11 +6601,11 @@
 endif # BOARD_BUILD_RETROFIT_DYNAMIC_PARTITIONS_OTA_PACKAGE
 
 ifneq ($(BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST),)
-name := $(product_name)-partial-ota-$(FILE_NAME_TAG)
+name := $(product_name)-partial-ota
 
 INTERNAL_OTA_PARTIAL_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
 $(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
-$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
+$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET): $(BUILT_TARGET_FILES_DIR) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
 	@echo "Package partial OTA: $@"
 	$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --partial "$(BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST)")
 
@@ -6414,9 +6701,9 @@
 endif
 
 # The path to the zip file containing binaries with symbols.
-SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name)-symbols-$(FILE_NAME_TAG).zip
+SYMBOLS_ZIP := $(PRODUCT_OUT)/$(name)-symbols.zip
 # The path to a file containing mappings from elf IDs to filenames.
-SYMBOLS_MAPPING := $(PRODUCT_OUT)/$(name)-symbols-mapping-$(FILE_NAME_TAG).textproto
+SYMBOLS_MAPPING := $(PRODUCT_OUT)/$(name)-symbols-mapping.textproto
 .KATI_READONLY := SYMBOLS_ZIP SYMBOLS_MAPPING
 # For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
@@ -6491,7 +6778,7 @@
 ifeq ($(TARGET_BUILD_TYPE),debug)
   name := $(name)_debug
 endif
-name := $(name)-apps-$(FILE_NAME_TAG)
+name := $(name)-apps
 
 APPS_ZIP := $(PRODUCT_OUT)/$(name).zip
 $(APPS_ZIP): $(FULL_SYSTEMIMAGE_DEPS)
@@ -6542,9 +6829,9 @@
 #
 
 # The path to the zip file containing proguard dictionaries.
-PROGUARD_DICT_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-$(FILE_NAME_TAG).zip
+PROGUARD_DICT_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict.zip
 # The path to the zip file containing mappings from dictionary hashes to filenames.
-PROGUARD_DICT_MAPPING := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-mapping-$(FILE_NAME_TAG).textproto
+PROGUARD_DICT_MAPPING := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-dict-mapping.textproto
 .KATI_READONLY := PROGUARD_DICT_ZIP PROGUARD_DICT_MAPPING
 # For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
@@ -6573,7 +6860,7 @@
 #------------------------------------------------------------------
 # A zip of Proguard usage files.
 #
-PROGUARD_USAGE_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-usage-$(FILE_NAME_TAG).zip
+PROGUARD_USAGE_ZIP := $(PRODUCT_OUT)/$(TARGET_PRODUCT)-proguard-usage.zip
 # For apps_only build we'll establish the dependency later in build/make/core/main.mk.
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
 $(PROGUARD_USAGE_ZIP): \
@@ -6641,7 +6928,7 @@
 # For real devices and for dist builds, build super image from target files to an intermediate directory.
 INTERNAL_SUPERIMAGE_DIST_TARGET := $(call intermediates-dir-for,PACKAGING,super.img)/super.img
 $(INTERNAL_SUPERIMAGE_DIST_TARGET): extracted_input_target_files := $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE))
-$(INTERNAL_SUPERIMAGE_DIST_TARGET): $(LPMAKE) $(BUILT_TARGET_FILES_PACKAGE) $(BUILD_SUPER_IMAGE)
+$(INTERNAL_SUPERIMAGE_DIST_TARGET): $(LPMAKE) $(BUILT_TARGET_FILES_DIR) $(BUILD_SUPER_IMAGE)
 	$(call pretty,"Target super fs image from target files: $@")
 	PATH=$(dir $(LPMAKE)):$$PATH \
 	    $(BUILD_SUPER_IMAGE) -v $(extracted_input_target_files) $@
@@ -6750,7 +7037,7 @@
 ifeq ($(TARGET_BUILD_TYPE),debug)
   name := $(name)_debug
 endif
-name := $(name)-img-$(FILE_NAME_TAG)
+name := $(name)-img
 
 INTERNAL_UPDATE_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
 
@@ -6766,7 +7053,7 @@
 
 .PHONY: updatepackage
 updatepackage: $(INTERNAL_UPDATE_PACKAGE_TARGET)
-$(call dist-for-goals,updatepackage,$(INTERNAL_UPDATE_PACKAGE_TARGET))
+$(call dist-for-goals-with-filenametag,updatepackage,$(INTERNAL_UPDATE_PACKAGE_TARGET))
 
 
 # -----------------------------------------------------------------
@@ -6895,7 +7182,7 @@
         $(INSTALLED_SYSTEMIMAGE_TARGET) \
         $(INSTALLED_USERDATAIMAGE_TARGET)
 
-name := $(TARGET_PRODUCT)-emulator-$(FILE_NAME_TAG)
+name := $(TARGET_PRODUCT)-emulator
 
 INTERNAL_EMULATOR_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
 
@@ -6923,7 +7210,7 @@
 ifneq ($(HOST_OS),linux)
   $(error Building the monolithic SDK is only supported on Linux)
 endif
-sdk_name := android-sdk_$(FILE_NAME_TAG)
+sdk_name := android-sdk
 INTERNAL_SDK_HOST_OS_NAME := linux-$(SDK_HOST_ARCH)
 sdk_name := $(sdk_name)_$(INTERNAL_SDK_HOST_OS_NAME)
 
@@ -7097,17 +7384,9 @@
 haiku: $(SOONG_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_FUZZ_TARGETS)
 $(call dist-for-goals,haiku,$(SOONG_FUZZ_PACKAGING_ARCH_MODULES))
 $(call dist-for-goals,haiku,$(PRODUCT_OUT)/module-info.json)
-
-.PHONY: haiku-java-device
-haiku-java-device: $(SOONG_JAVA_FUZZ_DEVICE_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_DEVICE_TARGETS)
-$(call dist-for-goals,haiku-java-device,$(SOONG_JAVA_FUZZ_DEVICE_PACKAGING_ARCH_MODULES))
-$(call dist-for-goals,haiku-java-device,$(PRODUCT_OUT)/module-info.json)
-
-.PHONY: haiku-java-host
-haiku-java-host: $(SOONG_JAVA_FUZZ_HOST_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_HOST_TARGETS)
-$(call dist-for-goals,haiku-java-host,$(SOONG_JAVA_FUZZ_HOST_PACKAGING_ARCH_MODULES))
-$(call dist-for-goals,haiku-java-host,$(PRODUCT_OUT)/module-info.json)
-
+.PHONY: haiku-java
+haiku-java: $(SOONG_JAVA_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_TARGETS)
+$(call dist-for-goals,haiku-java,$(SOONG_JAVA_FUZZ_PACKAGING_ARCH_MODULES))
 .PHONY: haiku-rust
 haiku-rust: $(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_RUST_FUZZ_TARGETS)
 $(call dist-for-goals,haiku-rust,$(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES))
diff --git a/core/all_versions.bzl b/core/all_versions.bzl
new file mode 100644
index 0000000..33da673
--- /dev/null
+++ b/core/all_versions.bzl
@@ -0,0 +1,23 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+_all_versions = ["OPR1", "OPD1", "OPD2", "OPM1", "OPM2", "PPR1", "PPD1", "PPD2", "PPM1", "PPM2", "QPR1"] + [
+    version + subversion
+    for version in ["Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"]
+    for subversion in ["P1A", "P1B", "P2A", "P2B", "D1A", "D1B", "D2A", "D2B", "Q1A", "Q1B", "Q2A", "Q2B", "Q3A", "Q3B"]
+]
+
+variables_to_export_to_make = {
+    "ALL_VERSIONS": _all_versions,
+}
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index bf113ee..5dba2d1 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -27,6 +27,7 @@
 # Add variables to the namespace below:
 
 $(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_MEDIASERVER)
+$(call add_soong_config_var,ANDROID,TARGET_DYNAMIC_64_32_DRMSERVER)
 $(call add_soong_config_var,ANDROID,TARGET_ENABLE_MEDIADRM_64)
 $(call add_soong_config_var,ANDROID,IS_TARGET_MIXED_SEPOLICY)
 ifeq ($(IS_TARGET_MIXED_SEPOLICY),true)
@@ -41,6 +42,37 @@
 # MODULE_BUILD_FROM_SOURCE.
 BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
 
+ifneq ($(SANITIZE_TARGET)$(EMMA_INSTRUMENT_FRAMEWORK),)
+  # Always use sources when building the framework with Java coverage or
+  # sanitized builds as they both require purpose built prebuilts which we do
+  # not provide.
+  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+endif
+
+# ART does not provide linux_bionic variants needed for products that
+# set HOST_CROSS_OS=linux_bionic.
+ifeq (linux_bionic,${HOST_CROSS_OS})
+  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+endif
+
+# ART does not provide host side arm64 variants needed for products that
+# set HOST_CROSS_ARCH=arm64.
+ifeq (arm64,${HOST_CROSS_ARCH})
+  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+endif
+
+# TV based devices do not seem to work with prebuilts, so build from source
+# for now and fix in a follow up.
+ifneq (,$(filter tv,$(subst $(comma),$(space),${PRODUCT_CHARACTERISTICS})))
+  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+endif
+
+# ATV based devices do not seem to work with prebuilts, so build from source
+# for now and fix in a follow up.
+ifneq (,${PRODUCT_IS_ATV})
+  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+endif
+
 ifneq (,$(MODULE_BUILD_FROM_SOURCE))
   # Keep an explicit setting.
 else ifeq (,$(filter docs sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES)))
@@ -70,6 +102,13 @@
 endif
 
 $(call soong_config_set,art_module,source_build,$(ART_MODULE_BUILD_FROM_SOURCE))
+ifdef ART_DEBUG_OPT_FLAG
+$(call soong_config_set,art_module,art_debug_opt_flag,$(ART_DEBUG_OPT_FLAG))
+endif
+
+ifdef TARGET_BOARD_AUTO
+  $(call add_soong_config_var_value, ANDROID, target_board_auto, $(TARGET_BOARD_AUTO))
+endif
 
 # Ensure that those mainline modules who have individually toggleable prebuilts
 # are controlled by the MODULE_BUILD_FROM_SOURCE environment variable by
@@ -77,6 +116,7 @@
 INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES := \
   bluetooth \
   permission \
+  rkpd \
   uwb \
   wifi \
 
@@ -106,10 +146,18 @@
 SYSTEMUI_OPTIMIZE_JAVA ?= true
 $(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA)
 
+# Disable Compose in SystemUI by default.
+SYSTEMUI_USE_COMPOSE ?= false
+$(call add_soong_config_var,ANDROID,SYSTEMUI_USE_COMPOSE)
+
 ifdef PRODUCT_AVF_ENABLED
 $(call add_soong_config_var_value,ANDROID,avf_enabled,$(PRODUCT_AVF_ENABLED))
 endif
 
+ifdef PRODUCT_AVF_KERNEL_MODULES_ENABLED
+$(call add_soong_config_var_value,ANDROID,avf_kernel_modules_enabled,$(PRODUCT_AVF_KERNEL_MODULES_ENABLED))
+endif
+
 # Enable system_server optimizations by default unless explicitly set or if
 # there may be dependent runtime jars.
 # TODO(b/240588226): Remove the off-by-default exceptions after handling
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index eb429cd..9fab44d 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -302,3 +302,7 @@
 
 endif # LOCAL_PACKAGE_SPLITS
 
+###########################################################
+## SBOM generation
+###########################################################
+include $(BUILD_SBOM_GEN)
\ No newline at end of file
diff --git a/core/base_rules.mk b/core/base_rules.mk
index ec5a21e..c61c653 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -190,18 +190,6 @@
 $(call pretty-error,unusual tags: $(filter-out tests optional samples,$(my_module_tags)))
 endif
 
-# Add implicit tags.
-#
-# If the local directory or one of its parents contains a MODULE_LICENSE_GPL
-# file, tag the module as "gnu".  Search for "*_GPL*", "*_LGPL*" and "*_MPL*"
-# so that we can also find files like MODULE_LICENSE_GPL_AND_AFL
-#
-gpl_license_file := $(call find-parent-file,$(LOCAL_PATH),MODULE_LICENSE*_GPL* MODULE_LICENSE*_MPL* MODULE_LICENSE*_LGPL*)
-ifneq ($(gpl_license_file),)
-  my_module_tags += gnu
-  ALL_GPL_MODULE_LICENSE_FILES += $(gpl_license_file)
-endif
-
 LOCAL_MODULE_CLASS := $(strip $(LOCAL_MODULE_CLASS))
 ifneq ($(words $(LOCAL_MODULE_CLASS)),1)
   $(error $(LOCAL_PATH): LOCAL_MODULE_CLASS must contain exactly one word, not "$(LOCAL_MODULE_CLASS)")
@@ -949,6 +937,8 @@
     $(ALL_MODULES.$(my_register_name).CHECKED) $(my_checked_module)
 ALL_MODULES.$(my_register_name).BUILT := \
     $(ALL_MODULES.$(my_register_name).BUILT) $(LOCAL_BUILT_MODULE)
+ALL_MODULES.$(my_register_name).SOONG_MODULE_TYPE := \
+    $(ALL_MODULES.$(my_register_name).SOONG_MODULE_TYPE) $(LOCAL_SOONG_MODULE_TYPE)
 ifndef LOCAL_IS_HOST_MODULE
 ALL_MODULES.$(my_register_name).TARGET_BUILT := \
     $(ALL_MODULES.$(my_register_name).TARGET_BUILT) $(LOCAL_BUILT_MODULE)
@@ -1015,6 +1005,9 @@
 ALL_MODULES.$(my_register_name).SHARED_LIBS := \
     $(ALL_MODULES.$(my_register_name).SHARED_LIBS) $(LOCAL_SHARED_LIBRARIES)
 
+ALL_MODULES.$(my_register_name).STATIC_LIBS := \
+    $(ALL_MODULES.$(my_register_name).STATIC_LIBS) $(LOCAL_STATIC_LIBRARIES)
+
 ALL_MODULES.$(my_register_name).SYSTEM_SHARED_LIBS := \
     $(ALL_MODULES.$(my_register_name).SYSTEM_SHARED_LIBS) $(LOCAL_SYSTEM_SHARED_LIBRARIES)
 
@@ -1240,3 +1233,8 @@
 ###########################################################
 
 include $(BUILD_NOTICE_FILE)
+
+###########################################################
+## SBOM generation
+###########################################################
+include $(BUILD_SBOM_GEN)
diff --git a/core/binary.mk b/core/binary.mk
index 6f1d814..e2e5be4 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -168,7 +168,6 @@
 endif
 endif
 
-my_ndk_sysroot :=
 my_ndk_sysroot_include :=
 my_ndk_sysroot_lib :=
 my_api_level := 10000
@@ -183,11 +182,7 @@
   # Make sure we've built the NDK.
   my_additional_dependencies += $(SOONG_OUT_DIR)/ndk_base.timestamp
 
-  ifneq (,$(filter arm64 x86_64,$(my_arch)))
-    my_min_sdk_version := 21
-  else
-    my_min_sdk_version := $(MIN_SUPPORTED_SDK_VERSION)
-  endif
+  my_min_sdk_version := $(MIN_SUPPORTED_SDK_VERSION)
 
   # Historically we've just set up a bunch of symlinks in prebuilts/ndk to map
   # missing API levels to existing ones where necessary, but we're not doing
@@ -200,38 +195,19 @@
 
   my_ndk_crt_version := $(my_ndk_api)
 
-  my_ndk_hist_api := $(my_ndk_api)
-  ifeq ($(my_ndk_api),current)
-    # The last API level supported by the old prebuilt NDKs.
-    my_ndk_hist_api := 24
-  else
+  ifneq ($(my_ndk_api),current)
     my_api_level := $(my_ndk_api)
   endif
 
   my_ndk_source_root := \
       $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/sources
-  my_ndk_sysroot := \
-    $(HISTORICAL_NDK_VERSIONS_ROOT)/$(LOCAL_NDK_VERSION)/platforms/android-$(my_ndk_hist_api)/arch-$(my_arch)
   my_built_ndk := $(SOONG_OUT_DIR)/ndk
   my_ndk_triple := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_TRIPLE)
   my_ndk_sysroot_include := \
       $(my_built_ndk)/sysroot/usr/include \
       $(my_built_ndk)/sysroot/usr/include/$(my_ndk_triple) \
-      $(my_ndk_sysroot)/usr/include \
 
-  # x86_64 is a multilib toolchain, so their libraries are
-  # installed in /usr/lib64. Aarch64, on the other hand, is not a multilib
-  # compiler, so its libraries are in /usr/lib.
-  ifneq (,$(filter x86_64,$(my_arch)))
-    my_ndk_libdir_name := lib64
-  else
-    my_ndk_libdir_name := lib
-  endif
-
-  my_ndk_platform_dir := \
-      $(my_built_ndk)/platforms/android-$(my_ndk_api)/arch-$(my_arch)
-  my_built_ndk_libs := $(my_ndk_platform_dir)/usr/$(my_ndk_libdir_name)
-  my_ndk_sysroot_lib := $(my_ndk_sysroot)/usr/$(my_ndk_libdir_name)
+  my_ndk_sysroot_lib := $(my_built_ndk)/sysroot/usr/lib/$(my_ndk_triple)/$(my_ndk_api)
 
   # The bionic linker now has support for packed relocations and gnu style
   # hashes (which are much faster!), but shipping to older devices requires
@@ -1428,7 +1404,6 @@
 my_ndk_shared_libraries_fullpath := \
     $(foreach _lib,$(my_ndk_shared_libraries),\
         $(if $(filter $(NDK_KNOWN_LIBS),$(_lib)),\
-            $(my_built_ndk_libs)/$(_lib)$(so_suffix),\
             $(my_ndk_sysroot_lib)/$(_lib)$(so_suffix)))
 
 built_shared_libraries += \
@@ -1597,15 +1572,10 @@
 else ifdef LOCAL_SDK_VERSION
   my_target_global_c_includes :=
   my_target_global_c_system_includes := $(my_ndk_stl_include_path) $(my_ndk_sysroot_include)
-else ifdef BOARD_VNDK_VERSION
-  my_target_global_c_includes := $(SRC_HEADERS) \
-    $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
-  my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
-    $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
 else
   my_target_global_c_includes := $(SRC_HEADERS) \
     $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
-  my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) $(TARGET_OUT_HEADERS) \
+  my_target_global_c_system_includes := $(SRC_SYSTEM_HEADERS) \
     $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_SYSTEM_INCLUDES)
 endif
 
@@ -1692,14 +1662,8 @@
 
 ifdef LOCAL_USE_VNDK
   imported_includes += $(call intermediates-dir-for,HEADER_LIBRARIES,device_kernel_headers,$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))
-else ifdef LOCAL_SDK_VERSION
-  # Apps shouldn't need device-specific kernel headers
-else ifdef BOARD_VNDK_VERSION
-  # For devices building with the VNDK, only the VNDK gets device-specific kernel headers by default
-  # In soong, it's entirely opt-in
 else
-  # For older non-VNDK builds, continue adding in kernel headers to everything like we used to
-  imported_includes += $(call intermediates-dir-for,HEADER_LIBRARIES,device_kernel_headers,$(my_kind),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))
+  # everything else should manually specify headers
 endif
 
 imported_includes := $(strip \
diff --git a/core/board_config.mk b/core/board_config.mk
index 70c91a8..663ec7c 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -174,6 +174,7 @@
 
 
 _build_broken_var_list := \
+  BUILD_BROKEN_PLUGIN_VALIDATION \
   BUILD_BROKEN_CLANG_PROPERTY \
   BUILD_BROKEN_CLANG_ASFLAGS \
   BUILD_BROKEN_CLANG_CFLAGS \
@@ -188,6 +189,7 @@
   BUILD_BROKEN_PREBUILT_ELF_FILES \
   BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW \
   BUILD_BROKEN_USES_NETWORK \
+  BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES \
   BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE \
   BUILD_BROKEN_VINTF_PRODUCT_COPY_FILES \
 
@@ -255,7 +257,7 @@
   endif
 
   $(shell build/soong/scripts/update_out $(OUT_DIR)/rbc/rbc_board_config_results.mk \
-    $(OUT_DIR)/rbcrun RBC_OUT="make" $(OUT_DIR)/rbc/boardlauncher.rbc)
+    $(OUT_DIR)/rbcrun --mode=rbc $(OUT_DIR)/rbc/boardlauncher.rbc)
   ifneq ($(.SHELLSTATUS),0)
     $(error board configuration runner failed: $(.SHELLSTATUS))
   endif
@@ -918,12 +920,6 @@
 endif
 .KATI_READONLY := BOARD_USES_PVMFWIMAGE
 
-BUILDING_PVMFW_IMAGE :=
-ifeq ($(PRODUCT_BUILD_PVMFW_IMAGE),true)
-  BUILDING_PVMFW_IMAGE := true
-endif
-.KATI_READONLY := BUILDING_PVMFW_IMAGE
-
 ###########################################
 # Ensure consistency among TARGET_RECOVERY_UPDATER_LIBS, AB_OTA_UPDATER, and PRODUCT_OTA_FORCE_NON_AB_PACKAGE.
 TARGET_RECOVERY_UPDATER_LIBS ?=
@@ -972,42 +968,30 @@
   $(if $(wildcard $(vndk_path)/*/Android.bp),,$(error VNDK version $(1) not found))
 endef
 
-ifdef BOARD_VNDK_VERSION
-  ifeq ($(BOARD_VNDK_VERSION),$(PLATFORM_VNDK_VERSION))
-    $(error BOARD_VNDK_VERSION is equal to PLATFORM_VNDK_VERSION; use BOARD_VNDK_VERSION := current)
-  endif
-  ifneq ($(BOARD_VNDK_VERSION),current)
-    $(call check_vndk_version,$(BOARD_VNDK_VERSION))
-  endif
-  TARGET_VENDOR_TEST_SUFFIX := /vendor
-else
-  TARGET_VENDOR_TEST_SUFFIX :=
+ifeq ($(BOARD_VNDK_VERSION),$(PLATFORM_VNDK_VERSION))
+  $(error BOARD_VNDK_VERSION is equal to PLATFORM_VNDK_VERSION; use BOARD_VNDK_VERSION := current)
 endif
-
-# If PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY is set,
-# BOARD_VNDK_VERSION must be set because PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY
-# is a enforcement of inter-partition dependency, and it doesn't have any meaning
-# when BOARD_VNDK_VERSION isn't set.
-ifeq ($(PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY),true)
-  ifeq ($(BOARD_VNDK_VERSION),)
-    $(error BOARD_VNDK_VERSION must be set when PRODUCT_ENFORCE_INTER_PARTITION_JAVA_SDK_LIBRARY is true)
-  endif
+ifneq ($(BOARD_VNDK_VERSION),current)
+  $(call check_vndk_version,$(BOARD_VNDK_VERSION))
 endif
+TARGET_VENDOR_TEST_SUFFIX := /vendor
 
 ###########################################
-# APEXes are by default flattened, i.e. non-updatable, if not building unbundled
-# apps. It can be unflattened (and updatable) by inheriting from
-# updatable_apex.mk
+# APEXes are by default not flattened, i.e. updatable.
 #
 # APEX flattening can also be forcibly enabled (resp. disabled) by
 # setting OVERRIDE_TARGET_FLATTEN_APEX to true (resp. false), e.g. by
 # setting the OVERRIDE_TARGET_FLATTEN_APEX environment variable.
 ifdef OVERRIDE_TARGET_FLATTEN_APEX
   TARGET_FLATTEN_APEX := $(OVERRIDE_TARGET_FLATTEN_APEX)
-else
-  ifeq (,$(TARGET_BUILD_APPS)$(TARGET_FLATTEN_APEX))
-    TARGET_FLATTEN_APEX := true
-  endif
+endif
+
+# TODO(b/278826656) Remove the following message
+ifeq (true,$(TARGET_FLATTEN_APEX))
+  $(warning ********************************************************************************)
+  $(warning Flattened APEX will be deprecated soon. Please stop using flattened APEX and use)
+  $(warning "image" APEX instead.)
+  $(warning ********************************************************************************)
 endif
 
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
diff --git a/core/board_config_wifi.mk b/core/board_config_wifi.mk
index ddeb0d7..8289bf2 100644
--- a/core/board_config_wifi.mk
+++ b/core/board_config_wifi.mk
@@ -74,4 +74,10 @@
 endif
 ifdef WIFI_AVOID_IFACE_RESET_MAC_CHANGE
     $(call soong_config_set,wifi,avoid_iface_reset_mac_change,true)
+endif
+ifdef WIFI_SKIP_STATE_TOGGLE_OFF_ON_FOR_NAN
+    $(call soong_config_set,wifi,wifi_skip_state_toggle_off_on_for_nan,true)
+endif
+ifeq ($(strip $(TARGET_USES_AOSP_FOR_WLAN)),true)
+    $(call soong_config_set,wifi,target_uses_aosp_for_wlan,true)
 endif
\ No newline at end of file
diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk
index 5576785..f41f1b7 100644
--- a/core/cleanbuild.mk
+++ b/core/cleanbuild.mk
@@ -33,8 +33,6 @@
 
 # CTS-specific config.
 -include cts/build/config.mk
-# VTS-specific config.
--include test/vts/tools/vts-tradefed/build/config.mk
 # device-tests-specific-config.
 -include tools/tradefederation/build/suites/device-tests/config.mk
 # general-tests-specific-config.
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index e325760..bb7ba1b 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -293,6 +293,7 @@
 LOCAL_SOONG_LICENSE_METADATA :=
 LOCAL_SOONG_LINK_TYPE :=
 LOCAL_SOONG_LINT_REPORTS :=
+LOCAL_SOONG_MODULE_TYPE :=
 LOCAL_SOONG_PROGUARD_DICT :=
 LOCAL_SOONG_PROGUARD_USAGE_ZIP :=
 LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
@@ -502,6 +503,7 @@
 
 # Robolectric variables
 LOCAL_INSTRUMENT_SOURCE_DIRS :=
+LOCAL_INSTRUMENT_SRCJARS :=
 LOCAL_ROBOTEST_FAILURE_FATAL :=
 LOCAL_ROBOTEST_FILES :=
 LOCAL_ROBOTEST_TIMEOUT :=
diff --git a/core/combo/arch/arm64/armv9-a.mk b/core/combo/arch/arm64/armv9-a.mk
new file mode 100644
index 0000000..de0760a
--- /dev/null
+++ b/core/combo/arch/arm64/armv9-a.mk
@@ -0,0 +1,19 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# .mk file required to support build for the new armv9-a Arm64 arch
+# variant. The file just needs to be present but is not required to contain
+# anything.
diff --git a/core/config.mk b/core/config.mk
index 025a3a1..5191917 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -42,6 +42,9 @@
 # Mark variables deprecated/obsolete
 CHANGES_URL := https://android.googlesource.com/platform/build/+/master/Changes.md
 .KATI_READONLY := CHANGES_URL
+$(KATI_deprecated_var TARGET_USES_64_BIT_BINDER,All devices use 64-bit binder by default now. Uses of TARGET_USES_64_BIT_BINDER should be removed.)
+$(KATI_deprecated_var PRODUCT_SEPOLICY_SPLIT,All devices are built with split sepolicy.)
+$(KATI_deprecated_var PRODUCT_SEPOLICY_SPLIT_OVERRIDE,All devices are built with split sepolicy.)
 $(KATI_obsolete_var PATH,Do not use PATH directly. See $(CHANGES_URL)#PATH)
 $(KATI_obsolete_var PYTHONPATH,Do not use PYTHONPATH directly. See $(CHANGES_URL)#PYTHONPATH)
 $(KATI_obsolete_var OUT,Use OUT_DIR instead. See $(CHANGES_URL)#OUT)
@@ -166,6 +169,7 @@
 $(KATI_obsolete_var PRODUCT_SUPPORTS_BOOT_SIGNER,VB 1.0 and related variables are no longer supported)
 $(KATI_obsolete_var PRODUCT_VERITY_SIGNING_KEY,VB 1.0 and related variables are no longer supported)
 $(KATI_obsolete_var BOARD_PREBUILT_PVMFWIMAGE,pvmfw.bin is now built in AOSP and custom versions are no longer supported)
+$(KATI_obsolete_var BUILDING_PVMFW_IMAGE,BUILDING_PVMFW_IMAGE is no longer used)
 $(KATI_obsolete_var BOARD_BUILD_SYSTEM_ROOT_IMAGE)
 
 # Used to force goals to build.  Only use for conditionally defined goals.
@@ -232,6 +236,7 @@
 BUILD_FUZZ_TEST :=$= $(BUILD_SYSTEM)/fuzz_test.mk
 
 BUILD_NOTICE_FILE :=$= $(BUILD_SYSTEM)/notice_files.mk
+BUILD_SBOM_GEN :=$= $(BUILD_SYSTEM)/sbom.mk
 
 include $(BUILD_SYSTEM)/deprecation.mk
 
@@ -268,7 +273,7 @@
 # Ex: $(call add_soong_config_namespace,acme)
 
 define add_soong_config_namespace
-$(eval SOONG_CONFIG_NAMESPACES += $1) \
+$(eval SOONG_CONFIG_NAMESPACES += $(strip $1)) \
 $(eval SOONG_CONFIG_$(strip $1) :=)
 endef
 
@@ -278,8 +283,8 @@
 # $1 is the namespace. $2 is the list of variables.
 # Ex: $(call add_soong_config_var,acme,COOL_FEATURE_A COOL_FEATURE_B)
 define add_soong_config_var
-$(eval SOONG_CONFIG_$(strip $1) += $2) \
-$(foreach v,$(strip $2),$(eval SOONG_CONFIG_$(strip $1)_$v := $($v)))
+$(eval SOONG_CONFIG_$(strip $1) += $(strip $2)) \
+$(foreach v,$(strip $2),$(eval SOONG_CONFIG_$(strip $1)_$v := $(strip $($v))))
 endef
 
 # The add_soong_config_var_value function defines a make variable and also adds
@@ -288,7 +293,7 @@
 # Ex: $(call add_soong_config_var_value,acme,COOL_FEATURE,true)
 
 define add_soong_config_var_value
-$(eval $2 := $3) \
+$(eval $(strip $2) := $(strip $3)) \
 $(call add_soong_config_var,$1,$2)
 endef
 
@@ -296,8 +301,8 @@
 #
 # internal utility to define a namespace and a variable in it.
 define soong_config_define_internal
-$(if $(filter $1,$(SOONG_CONFIG_NAMESPACES)),,$(eval SOONG_CONFIG_NAMESPACES:=$(SOONG_CONFIG_NAMESPACES) $1)) \
-$(if $(filter $2,$(SOONG_CONFIG_$(strip $1))),,$(eval SOONG_CONFIG_$(strip $1):=$(SOONG_CONFIG_$(strip $1)) $2))
+$(if $(filter $1,$(SOONG_CONFIG_NAMESPACES)),,$(eval SOONG_CONFIG_NAMESPACES:=$(SOONG_CONFIG_NAMESPACES) $(strip $1))) \
+$(if $(filter $2,$(SOONG_CONFIG_$(strip $1))),,$(eval SOONG_CONFIG_$(strip $1):=$(SOONG_CONFIG_$(strip $1)) $(strip $2)))
 endef
 
 # soong_config_set defines the variable in the given Soong config namespace
@@ -306,7 +311,7 @@
 # Ex: $(call soong_config_set,acme,COOL_FEATURE,true)
 define soong_config_set
 $(call soong_config_define_internal,$1,$2) \
-$(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$3)
+$(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(strip $3))
 endef
 
 # soong_config_append appends to the value of the variable in the given Soong
@@ -315,7 +320,7 @@
 # $1 is the namespace, $2 is the variable name, $3 is the value
 define soong_config_append
 $(call soong_config_define_internal,$1,$2) \
-$(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(SOONG_CONFIG_$(strip $1)_$(strip $2)) $3)
+$(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(SOONG_CONFIG_$(strip $1)_$(strip $2)) $(strip $3))
 endef
 
 # soong_config_append gets to the value of the variable in the given Soong
@@ -351,11 +356,66 @@
 endif
 -include $(ANDROID_BUILDSPEC)
 
+# Starting in Android U, non-VNDK devices are not supported
+# WARNING: DO NOT CHANGE: if you are downstream of AOSP, and you change this, without
+# letting upstream know it's important to you, we may do cleanup which breaks this
+# significantly. Please let us know if you are changing this.
+ifndef BOARD_VNDK_VERSION
+# READ WARNING - DO NOT CHANGE
+BOARD_VNDK_VERSION := current
+# READ WARNING - DO NOT CHANGE
+endif
+
 # ---------------------------------------------------------------
 # Define most of the global variables.  These are the ones that
 # are specific to the user's build configuration.
 include $(BUILD_SYSTEM)/envsetup.mk
 
+# Returns true if it is a low memory device, otherwise it returns false.
+define is-low-mem-device
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_PROPERTY_OVERRIDES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_DEFAULT_PROPERTY_OVERRIDES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_COMPATIBLE_PROPERTY)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_SYSTEM_DEFAULT_PROPERTIES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_SYSTEM_EXT_PROPERTIES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_PRODUCT_PROPERTIES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_VENDOR_PROPERTIES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_ODM_PROPERTIES)),true,false)))))))))
+endef
+
+# Get the board API level.
+board_api_level := $(PLATFORM_SDK_VERSION)
+ifdef BOARD_API_LEVEL
+  board_api_level := $(BOARD_API_LEVEL)
+else ifdef BOARD_SHIPPING_API_LEVEL
+  # Vendors with GRF must define BOARD_SHIPPING_API_LEVEL for the vendor API level.
+  board_api_level := $(BOARD_SHIPPING_API_LEVEL)
+endif
+
+# Calculate the VSR vendor API level.
+vsr_vendor_api_level := $(board_api_level)
+
+ifdef PRODUCT_SHIPPING_API_LEVEL
+  vsr_vendor_api_level := $(call math_min,$(PRODUCT_SHIPPING_API_LEVEL),$(board_api_level))
+endif
+
+# Set TARGET_MAX_PAGE_SIZE_SUPPORTED.
+ifdef PRODUCT_MAX_PAGE_SIZE_SUPPORTED
+  TARGET_MAX_PAGE_SIZE_SUPPORTED := $(PRODUCT_MAX_PAGE_SIZE_SUPPORTED)
+else ifeq ($(strip $(call is-low-mem-device)),true)
+  # Low memory device will have 4096 binary alignment.
+  TARGET_MAX_PAGE_SIZE_SUPPORTED := 4096
+else
+  # The default binary alignment for userspace is 4096.
+  TARGET_MAX_PAGE_SIZE_SUPPORTED := 4096
+  # When VSR vendor API level >= 34, binary alignment will be 65536.
+  ifeq ($(call math_gt_or_eq,$(vsr_vendor_api_level),34),true)
+      TARGET_MAX_PAGE_SIZE_SUPPORTED := 65536
+  endif
+endif
+.KATI_READONLY := TARGET_MAX_PAGE_SIZE_SUPPORTED
+
 # Pruned directory options used when using findleaves.py
 # See envsetup.mk for a description of SCAN_EXCLUDE_DIRS
 FIND_LEAVES_EXCLUDES := $(addprefix --prune=, $(SCAN_EXCLUDE_DIRS) .repo .git)
@@ -498,8 +558,13 @@
 
 TARGET_BUILD_USE_PREBUILT_SDKS :=
 DISABLE_PREOPT :=
+DISABLE_PREOPT_BOOT_IMAGES :=
 ifneq (,$(TARGET_BUILD_APPS)$(TARGET_BUILD_UNBUNDLED_IMAGE))
   DISABLE_PREOPT := true
+  # VSDK builds perform dexpreopt during merge_target_files build step.
+  ifneq (true,$(BUILDING_WITH_VSDK))
+    DISABLE_PREOPT_BOOT_IMAGES := true
+  endif
 endif
 ifeq (true,$(TARGET_BUILD_UNBUNDLED))
   ifneq (true,$(UNBUNDLED_BUILD_SDKS_FROM_SOURCE))
@@ -510,6 +575,7 @@
 .KATI_READONLY := \
   TARGET_BUILD_USE_PREBUILT_SDKS \
   DISABLE_PREOPT \
+  DISABLE_PREOPT_BOOT_IMAGES \
 
 prebuilt_sdk_tools := prebuilts/sdk/tools
 prebuilt_sdk_tools_bin := $(prebuilt_sdk_tools)/$(HOST_OS)/bin
@@ -581,7 +647,6 @@
 endif
 PROTOC := $(HOST_OUT_EXECUTABLES)/aprotoc$(HOST_EXECUTABLE_SUFFIX)
 NANOPB_SRCS := $(HOST_OUT_EXECUTABLES)/protoc-gen-nanopb
-VTSC := $(HOST_OUT_EXECUTABLES)/vtsc$(HOST_EXECUTABLE_SUFFIX)
 MKBOOTFS := $(HOST_OUT_EXECUTABLES)/mkbootfs$(HOST_EXECUTABLE_SUFFIX)
 MINIGZIP := $(HOST_OUT_EXECUTABLES)/minigzip$(HOST_EXECUTABLE_SUFFIX)
 LZ4 := $(HOST_OUT_EXECUTABLES)/lz4$(HOST_EXECUTABLE_SUFFIX)
@@ -618,7 +683,11 @@
 LPMAKE := $(HOST_OUT_EXECUTABLES)/lpmake$(HOST_EXECUTABLE_SUFFIX)
 ADD_IMG_TO_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/add_img_to_target_files$(HOST_EXECUTABLE_SUFFIX)
 BUILD_IMAGE := $(HOST_OUT_EXECUTABLES)/build_image$(HOST_EXECUTABLE_SUFFIX)
+ifeq (,$(strip $(BOARD_CUSTOM_BUILD_SUPER_IMAGE)))
 BUILD_SUPER_IMAGE := $(HOST_OUT_EXECUTABLES)/build_super_image$(HOST_EXECUTABLE_SUFFIX)
+else
+BUILD_SUPER_IMAGE := $(BOARD_CUSTOM_BUILD_SUPER_IMAGE)
+endif
 IMG_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/img_from_target_files$(HOST_EXECUTABLE_SUFFIX)
 MAKE_RECOVERY_PATCH := $(HOST_OUT_EXECUTABLES)/make_recovery_patch$(HOST_EXECUTABLE_SUFFIX)
 OTA_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/ota_from_target_files$(HOST_EXECUTABLE_SUFFIX)
@@ -641,6 +710,8 @@
 DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
 
+GEN_SBOM := $(HOST_OUT_EXECUTABLES)/generate-sbom
+
 FINDBUGS_DIR := external/owasp/sanitizer/tools/findbugs/bin
 FINDBUGS := $(FINDBUGS_DIR)/findbugs
 
@@ -680,7 +751,6 @@
 
 requirements := \
     PRODUCT_TREBLE_LINKER_NAMESPACES \
-    PRODUCT_SEPOLICY_SPLIT \
     PRODUCT_ENFORCE_VINTF_MANIFEST \
     PRODUCT_NOTICE_SPLIT
 
@@ -695,14 +765,6 @@
 PRODUCT_FULL_TREBLE_OVERRIDE ?=
 $(foreach req,$(requirements),$(eval $(req)_OVERRIDE ?=))
 
-ifneq ($(PRODUCT_SEPOLICY_SPLIT),true)
-# WARNING: DO NOT CHANGE: if you are downstream of AOSP, and you change this, without
-# letting upstream know it's important to you, we may do cleanup which breaks this
-# significantly. Please let us know if you are changing this.
-# TODO(b/257176017) - unsplit sepolicy is no longer supported
-PRODUCT_SEPOLICY_SPLIT := true
-endif
-
 # TODO(b/114488870): disallow PRODUCT_FULL_TREBLE_OVERRIDE from being used.
 .KATI_READONLY := \
     PRODUCT_FULL_TREBLE_OVERRIDE \
@@ -723,24 +785,6 @@
   BOARD_PROPERTY_OVERRIDES_SPLIT_ENABLED ?= true
 endif
 
-# Starting in Android U, non-VNDK devices not supported
-# WARNING: DO NOT CHANGE: if you are downstream of AOSP, and you change this, without
-# letting upstream know it's important to you, we may do cleanup which breaks this
-# significantly. Please let us know if you are changing this.
-ifndef BOARD_VNDK_VERSION
-# READ WARNING - DO NOT CHANGE
-BOARD_VNDK_VERSION := current
-# READ WARNING - DO NOT CHANGE
-endif
-
-ifdef PRODUCT_PRODUCT_VNDK_VERSION
-  ifndef BOARD_VNDK_VERSION
-    # VNDK for product partition is not available unless BOARD_VNDK_VERSION
-    # defined.
-    $(error PRODUCT_PRODUCT_VNDK_VERSION cannot be defined without defining BOARD_VNDK_VERSION)
-  endif
-endif
-
 # Set BOARD_SYSTEMSDK_VERSIONS to the latest SystemSDK version starting from P-launching
 # devices if unset.
 ifndef BOARD_SYSTEMSDK_VERSIONS
@@ -776,13 +820,6 @@
   ifneq ($(call numbers_less_than,$(min_systemsdk_version),$(BOARD_SYSTEMSDK_VERSIONS)),)
     $(error BOARD_SYSTEMSDK_VERSIONS ($(BOARD_SYSTEMSDK_VERSIONS)) must all be greater than or equal to BOARD_API_LEVEL, BOARD_SHIPPING_API_LEVEL or PRODUCT_SHIPPING_API_LEVEL ($(min_systemsdk_version)))
   endif
-  ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),28),)
-    ifneq ($(TARGET_IS_64_BIT), true)
-      ifneq ($(TARGET_USES_64_BIT_BINDER), true)
-        $(error When PRODUCT_SHIPPING_API_LEVEL >= 28, TARGET_USES_64_BIT_BINDER must be true)
-      endif
-    endif
-  endif
   ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29),)
     ifneq ($(BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE),)
       $(error When PRODUCT_SHIPPING_API_LEVEL >= 29, BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE cannot be set)
@@ -807,6 +844,7 @@
 .KATI_READONLY := MAINLINE_SEPOLICY_DEV_CERTIFICATES
 
 BUILD_NUMBER_FROM_FILE := $$(cat $(SOONG_OUT_DIR)/build_number.txt)
+BUILD_HOSTNAME_FROM_FILE := $$(cat $(SOONG_OUT_DIR)/build_hostname.txt)
 BUILD_DATETIME_FROM_FILE := $$(cat $(BUILD_DATETIME_FILE))
 
 # SEPolicy versions
@@ -856,7 +894,6 @@
 
 # A list of SEPolicy versions, besides PLATFORM_SEPOLICY_VERSION, that the framework supports.
 PLATFORM_SEPOLICY_COMPAT_VERSIONS := \
-    28.0 \
     29.0 \
     30.0 \
     31.0 \
@@ -1159,13 +1196,6 @@
 TARGET_SDK_VERSIONS_WITHOUT_JAVA_18_SUPPORT := $(call numbers_less_than,24,$(TARGET_AVAILABLE_SDK_VERSIONS))
 TARGET_SDK_VERSIONS_WITHOUT_JAVA_19_SUPPORT := $(call numbers_less_than,30,$(TARGET_AVAILABLE_SDK_VERSIONS))
 
-# Missing optional uses-libraries so that the platform doesn't create build rules that depend on
-# them.
-INTERNAL_PLATFORM_MISSING_USES_LIBRARIES := \
-  com.google.android.ble \
-  com.google.android.media.effects \
-  com.google.android.wearable \
-
 # This is the standard way to name a directory containing prebuilt target
 # objects. E.g., prebuilt/$(TARGET_PREBUILT_TAG)/libc.so
 TARGET_PREBUILT_TAG := android-$(TARGET_ARCH)
@@ -1182,16 +1212,7 @@
 RSCOMPAT_32BIT_ONLY_API_LEVELS := 8 9 10 11 12 13 14 15 16 17 18 19 20
 RSCOMPAT_NO_USAGEIO_API_LEVELS := 8 9 10 11 12 13
 
-# Add BUILD_NUMBER to apps default version name if it's unbundled build.
-ifdef TARGET_BUILD_APPS
-TARGET_BUILD_WITH_APPS_VERSION_NAME := true
-endif
-
-ifdef TARGET_BUILD_WITH_APPS_VERSION_NAME
-APPS_DEFAULT_VERSION_NAME := $(PLATFORM_VERSION)-$(BUILD_NUMBER_FROM_FILE)
-else
 APPS_DEFAULT_VERSION_NAME := $(PLATFORM_VERSION)
-endif
 
 # ANDROID_WARNING_ALLOWED_PROJECTS is generated by build/soong.
 define find_warning_allowed_projects
@@ -1231,6 +1252,9 @@
 
 .KATI_READONLY := JAVAC_NINJA_POOL R8_NINJA_POOL D8_NINJA_POOL
 
+# Soong modules that are known to have broken optional_uses_libs dependencies.
+BUILD_WARNING_BAD_OPTIONAL_USES_LIBS_ALLOWLIST := LegacyCamera Gallery2
+
 # These goals don't need to collect and include Android.mks/CleanSpec.mks
 # in the source tree.
 dont_bother_goals := out product-graph
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 35c632c..d837c6e 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -155,6 +155,17 @@
   endif
 endif
 
+# Enable HWASan in included paths.
+ifeq ($(filter hwaddress, $(my_sanitize)),)
+  combined_include_paths := $(HWASAN_INCLUDE_PATHS) \
+                            $(PRODUCT_HWASAN_INCLUDE_PATHS)
+
+  ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_include_paths)),\
+         $(filter $(dir)%,$(LOCAL_PATH)))),)
+    my_sanitize := hwaddress $(my_sanitize)
+  endif
+endif
+
 # If CFI is disabled globally, remove it from my_sanitize.
 ifeq ($(strip $(ENABLE_CFI)),false)
   my_sanitize := $(filter-out cfi,$(my_sanitize))
@@ -238,6 +249,13 @@
   endif
 endif
 
+# Ignore SANITIZE_TARGET_DIAG=memtag_heap without SANITIZE_TARGET=memtag_heap
+# This can happen if a condition above filters out memtag_heap from
+# my_sanitize. It is easier to handle all of these cases here centrally.
+ifneq ($(filter memtag_heap,$(my_sanitize_diag)),)
+  my_sanitize_diag := $(filter-out memtag_heap,$(my_sanitize_diag))
+endif
+
 ifneq ($(filter memtag_heap,$(my_sanitize)),)
   my_cflags += -fsanitize=memtag-heap
   my_sanitize := $(filter-out memtag_heap,$(my_sanitize))
@@ -435,6 +453,13 @@
 # If local module needs HWASAN, add compiler flags.
 ifneq ($(filter hwaddress,$(my_sanitize)),)
   my_cflags += $(HWADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS)
+
+  ifneq ($(filter EXECUTABLES NATIVE_TESTS,$(LOCAL_MODULE_CLASS)),)
+    ifneq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
+      my_linker := /system/bin/linker_hwasan64
+    endif
+  endif
+
 endif
 
 # Use minimal diagnostics when integer overflow is enabled; never do it for HOST modules
diff --git a/core/copy_headers.mk b/core/copy_headers.mk
index 054d271..c457eb0 100644
--- a/core/copy_headers.mk
+++ b/core/copy_headers.mk
@@ -18,11 +18,9 @@
 # If we're using the VNDK, only vendor modules using the VNDK may use
 # LOCAL_COPY_HEADERS. Platform libraries will not have the include path
 # present.
-ifdef BOARD_VNDK_VERSION
 ifndef LOCAL_USE_VNDK
   $(call pretty-error,Only vendor modules using LOCAL_USE_VNDK may use LOCAL_COPY_HEADERS)
 endif
-endif
 
 # Clean up LOCAL_COPY_HEADERS_TO, since soong_ui will be comparing cleaned
 # paths to figure out which headers are obsolete and should be removed.
diff --git a/core/definitions.mk b/core/definitions.mk
index 778485e..be40584 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -75,9 +75,6 @@
 # All findbugs xml files
 ALL_FINDBUGS_FILES:=
 
-# GPL module license files
-ALL_GPL_MODULE_LICENSE_FILES:=
-
 # Packages with certificate violation
 CERTIFICATE_VIOLATION_MODULES :=
 
@@ -597,7 +594,7 @@
 define declare-copy-target-license-metadata
 $(strip $(if $(filter $(OUT_DIR)%,$(2)),\
   $(eval _tgt:=$(strip $(1)))\
-  $(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2)))\
+  $(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(sort $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2))))\
   $(eval ALL_COPIED_TARGETS += $(_tgt))))
 endef
 
@@ -897,7 +894,8 @@
 endef
 
 ###########################################################
-## Declare license dependencies $(2) for non-module target $(1)
+## Declare license dependencies $(2) with optional colon-separated
+## annotations for non-module target $(1)
 ###########################################################
 define declare-license-deps
 $(strip \
@@ -909,7 +907,8 @@
 endef
 
 ###########################################################
-## Declare license dependencies $(2) for non-module container-type target $(1)
+## Declare license dependencies $(2) with optional colon-separated
+## annotations for non-module container-type target $(1)
 ##
 ## Container-type targets are targets like .zip files that
 ## merely aggregate other files.
@@ -2546,7 +2545,87 @@
         @$(call emit-line,$(wordlist 58001,58500,$(1)),$(2))
         @$(call emit-line,$(wordlist 58501,59000,$(1)),$(2))
         @$(call emit-line,$(wordlist 59001,59500,$(1)),$(2))
-        @$(if $(wordlist 59501,59502,$(1)),$(error Too many words ($(words $(1)))))
+        @$(call emit-line,$(wordlist 59501,60000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 60001,60500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 60501,61000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 61001,61500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 61501,62000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 62001,62500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 62501,63000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 63001,63500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 63501,64000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 64001,64500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 64501,65000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 65001,65500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 65501,66000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 66001,66500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 66501,67000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 67001,67500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 67501,68000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 68001,68500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 68501,69000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 69001,69500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 69501,70000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 70001,70500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 70501,71000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 71001,71500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 71501,72000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 72001,72500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 72501,73000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 73001,73500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 73501,74000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 74001,74500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 74501,75000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 75001,75500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 75501,76000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 76001,76500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 76501,77000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 77001,77500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 77501,78000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 78001,78500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 78501,79000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 79001,79500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 79501,80000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 80001,80500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 80501,81000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 81001,81500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 81501,82000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 82001,82500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 82501,83000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 83001,83500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 83501,84000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 84001,84500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 84501,85000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 85001,85500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 85501,86000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 86001,86500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 86501,87000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 87001,87500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 87501,88000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 88001,88500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 88501,89000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 89001,89500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 89501,90000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 90001,90500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 90501,91000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 91001,91500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 91501,92000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 92001,92500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 92501,93000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 93001,93500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 93501,94000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 94001,94500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 94501,95000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 95001,95500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 95501,96000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 96001,96500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 96501,97000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 97001,97500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 97501,98000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 98001,98500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 98501,99000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 99001,99500,$(1)),$(2))
+        @$(if $(wordlist 99501,99502,$(1)),$(error dump-words-to-file: Too many words ($(words $(1)))))
 endef
 # Return jar arguments to compress files in a given directory
 # $(1): directory
@@ -2880,7 +2959,7 @@
   $(extract-package) \
   echo "Module name in Android tree: $(PRIVATE_MODULE)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
   echo "Local path in Android tree: $(PRIVATE_PATH)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
-  echo "Install path on $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT): $(PRIVATE_INSTALLED_MODULE)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+  echo "Install path: $(patsubst $(PRODUCT_OUT)/%,%,$(PRIVATE_INSTALLED_MODULE))" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
   echo >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
 endef
 ART_VERIDEX_APPCOMPAT_SCRIPT:=$(HOST_OUT)/bin/appcompat.sh
diff --git a/core/device.mk b/core/device.mk
deleted file mode 100644
index 20ff447..0000000
--- a/core/device.mk
+++ /dev/null
@@ -1,76 +0,0 @@
-#
-# Copyright (C) 2007 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-_device_var_list := \
-    DEVICE_NAME \
-    DEVICE_BOARD \
-    DEVICE_REGION
-
-define dump-device
-$(info ==== $(1) ====)\
-$(foreach v,$(_device_var_list),\
-$(info DEVICES.$(1).$(v) := $(DEVICES.$(1).$(v))))\
-$(info --------)
-endef
-
-define dump-devices
-$(foreach p,$(DEVICES),$(call dump-device,$(p)))
-endef
-
-#
-# $(1): device to inherit
-#
-define inherit-device
-  $(foreach v,$(_device_var_list), \
-      $(eval $(v) := $($(v)) $(INHERIT_TAG)$(strip $(1))))
-endef
-
-#
-# $(1): device makefile list
-#
-#TODO: check to make sure that devices have all the necessary vars defined
-define import-devices
-$(call import-nodes,DEVICES,$(1),$(_device_var_list))
-endef
-
-
-#
-# $(1): short device name like "sooner"
-#
-define _resolve-short-device-name
-  $(eval dn := $(strip $(1)))
-  $(eval d := \
-      $(foreach d,$(DEVICES), \
-          $(if $(filter $(dn),$(DEVICES.$(d).DEVICE_NAME)), \
-            $(d) \
-       )) \
-   )
-  $(eval d := $(sort $(d)))
-  $(if $(filter 1,$(words $(d))), \
-    $(d), \
-    $(if $(filter 0,$(words $(d))), \
-      $(error No matches for device "$(dn)"), \
-      $(error Device "$(dn)" ambiguous: matches $(d)) \
-    ) \
-  )
-endef
-
-#
-# $(1): short device name like "sooner"
-#
-define resolve-short-device-name
-$(strip $(call _resolve-short-device-name,$(1)))
-endef
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 88ec47f..86ca729 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -80,19 +80,45 @@
   $(foreach m,$(other_system_server_jars),\
     $(PRODUCT_OUT)/$(call word-colon,1,$(m))/framework/$(call word-colon,2,$(m)).jar)
 
+# Infix can be 'art' (ART image for testing), 'boot' (primary), or 'mainline' (mainline extension).
+# Soong creates a set of variables for Make, one for each boot image. The only reason why the ART
+# image is exposed to Make is testing (art gtests) and benchmarking (art golem benchmarks). Install
+# rules that use those variables are in dex_preopt_libart.mk. Here for dexpreopt purposes the infix
+# is always 'boot' or 'mainline'.
+DEXPREOPT_INFIX := $(if $(filter true,$(DEX_PREOPT_WITH_UPDATABLE_BCP)),mainline,boot)
+
+# The input variables are written by build/soong/java/dexpreopt_bootjars.go. Examples can be found
+# at the bottom of build/soong/java/dexpreopt_config_testing.go.
+dexpreopt_root_dir := $(dir $(patsubst %/,%,$(dir $(firstword $(bootclasspath_jars)))))
+booclasspath_arg := $(subst $(space),:,$(patsubst $(dexpreopt_root_dir)%,%,$(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)))
+booclasspath_locations_arg := $(subst $(space),:,$(DEXPREOPT_BOOTCLASSPATH_DEX_LOCATIONS))
+boot_images := $(subst :,$(space),$(DEXPREOPT_IMAGE_LOCATIONS_ON_DEVICE$(DEXPREOPT_INFIX)))
+boot_image_arg := $(subst $(space),:,$(patsubst /%,%,$(boot_images)))
+
+boot_zip_metadata_txt := $(dir $(boot_zip))boot_zip/METADATA.txt
+$(boot_zip_metadata_txt):
+	rm -f $@
+	echo "booclasspath = $(booclasspath_arg)" >> $@
+	echo "booclasspath-locations = $(booclasspath_locations_arg)" >> $@
+	echo "boot-image = $(boot_image_arg)" >> $@
+
+$(call dist-for-goals, droidcore, $(boot_zip_metadata_txt))
+
 $(boot_zip): PRIVATE_BOOTCLASSPATH_JARS := $(bootclasspath_jars)
 $(boot_zip): PRIVATE_SYSTEM_SERVER_JARS := $(system_server_jars)
-$(boot_zip): $(bootclasspath_jars) $(system_server_jars) $(SOONG_ZIP) $(MERGE_ZIPS) $(DEXPREOPT_IMAGE_ZIP_boot) $(DEXPREOPT_IMAGE_ZIP_art)
+$(boot_zip): $(bootclasspath_jars) $(system_server_jars) $(SOONG_ZIP) $(MERGE_ZIPS) $(DEXPREOPT_IMAGE_ZIP_boot) $(DEXPREOPT_IMAGE_ZIP_art) $(DEXPREOPT_IMAGE_ZIP_mainline) $(boot_zip_metadata_txt)
 	@echo "Create boot package: $@"
 	rm -f $@
 	$(SOONG_ZIP) -o $@.tmp \
 	  -C $(dir $(firstword $(PRIVATE_BOOTCLASSPATH_JARS)))/.. $(addprefix -f ,$(PRIVATE_BOOTCLASSPATH_JARS)) \
-	  -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_SYSTEM_SERVER_JARS))
-	$(MERGE_ZIPS) $@ $@.tmp $(DEXPREOPT_IMAGE_ZIP_boot) $(DEXPREOPT_IMAGE_ZIP_art)
+	  -C $(PRODUCT_OUT) $(addprefix -f ,$(PRIVATE_SYSTEM_SERVER_JARS)) \
+	  -j -f $(boot_zip_metadata_txt)
+	$(MERGE_ZIPS) $@ $@.tmp $(DEXPREOPT_IMAGE_ZIP_boot) $(DEXPREOPT_IMAGE_ZIP_art) $(DEXPREOPT_IMAGE_ZIP_mainline)
 	rm -f $@.tmp
 
 $(call dist-for-goals, droidcore, $(boot_zip))
 
+ifneq (,$(filter true,$(ART_MODULE_BUILD_FROM_SOURCE) $(MODULE_BUILD_FROM_SOURCE)))
 # Build the system_server.zip which contains the Apex system server jars and standalone system server jars
 system_server_zip := $(PRODUCT_OUT)/system_server.zip
 apex_system_server_jars := \
@@ -122,5 +148,6 @@
 
 $(call dist-for-goals, droidcore, $(system_server_zip))
 
+endif  #ART_MODULE_BUILD_FROM_SOURCE || MODULE_BUILD_FROM_SOURCE
 endif  #PRODUCT_USES_DEFAULT_ART_CONFIG
 endif  #WITH_DEXPREOPT
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index e36e2eb..7b9c4db 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -12,9 +12,15 @@
   # would result in passing bad arguments to dex2oat and failing the build.
   ENABLE_PREOPT :=
   ENABLE_PREOPT_BOOT_IMAGES :=
-else ifeq (true,$(DISABLE_PREOPT))
-  # Disable dexpreopt for libraries/apps, but do compile boot images.
-  ENABLE_PREOPT :=
+else
+  ifeq (true,$(DISABLE_PREOPT))
+    # Disable dexpreopt for libraries/apps, but may compile boot images.
+    ENABLE_PREOPT :=
+  endif
+  ifeq (true,$(DISABLE_PREOPT_BOOT_IMAGES))
+    # Disable dexpreopt for boot images, but may compile libraries/apps.
+    ENABLE_PREOPT_BOOT_IMAGES :=
+  endif
 endif
 
 # The default value for LOCAL_DEX_PREOPT
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index b303b52..bdd47a8 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -84,12 +84,13 @@
 ifndef LOCAL_DEX_PREOPT_GENERATE_PROFILE
   # If LOCAL_DEX_PREOPT_GENERATE_PROFILE is not defined, default it based on the existence of the
   # profile class listing. TODO: Use product specific directory here.
-  my_classes_directory := $(PRODUCT_DEX_PREOPT_PROFILE_DIR)
-  LOCAL_DEX_PREOPT_PROFILE := $(my_classes_directory)/$(LOCAL_MODULE).prof
+  ifdef PRODUCT_DEX_PREOPT_PROFILE_DIR
+    LOCAL_DEX_PREOPT_PROFILE := $(PRODUCT_DEX_PREOPT_PROFILE_DIR)/$(LOCAL_MODULE).prof
 
-  ifneq (,$(wildcard $(LOCAL_DEX_PREOPT_PROFILE)))
-    my_process_profile := true
-    my_profile_is_text_listing :=
+    ifneq (,$(wildcard $(LOCAL_DEX_PREOPT_PROFILE)))
+      my_process_profile := true
+      my_profile_is_text_listing :=
+    endif
   endif
 else
   my_process_profile := $(LOCAL_DEX_PREOPT_GENERATE_PROFILE)
@@ -110,18 +111,19 @@
 # Local module variables and functions used in dexpreopt and manifest_check.
 ################################################################################
 
-my_filtered_optional_uses_libraries := $(filter-out $(INTERNAL_PLATFORM_MISSING_USES_LIBRARIES), \
-  $(LOCAL_OPTIONAL_USES_LIBRARIES))
-
 # TODO(b/132357300): This may filter out too much, as PRODUCT_PACKAGES doesn't
 # include all packages (the full list is unknown until reading all Android.mk
 # makefiles). As a consequence, a library may be present but not included in
 # dexpreopt, which will result in class loader context mismatch and a failure
-# to load dexpreopt code on device. We should fix this, either by deferring
-# dependency computation until the full list of product packages is known, or
-# by adding product-specific lists of missing libraries.
+# to load dexpreopt code on device.
+# However, we have to do filtering here. Otherwise, we may include extra
+# libraries that Soong and Make don't generate build rules for (e.g., a library
+# that exists in the source tree but not installable), and therefore get Ninja
+# errors.
+# We have deferred CLC computation to the Ninja phase, but the dependency
+# computation still needs to be done early. For now, this is the best we can do.
 my_filtered_optional_uses_libraries := $(filter $(PRODUCT_PACKAGES), \
-  $(my_filtered_optional_uses_libraries))
+  $(LOCAL_OPTIONAL_USES_LIBRARIES))
 
 ifeq ($(LOCAL_MODULE_CLASS),APPS)
   # compatibility libraries are added to class loader context of an app only if
@@ -240,7 +242,7 @@
     --enforce-uses-libraries-relax,)
   my_dexpreopt_config_args := $(patsubst %,--dexpreopt-config %,$(my_dexpreopt_dep_configs))
 
-  my_enforced_uses_libraries := $(intermediates.COMMON)/enforce_uses_libraries.status
+  my_enforced_uses_libraries := $(intermediates)/enforce_uses_libraries.status
   $(my_enforced_uses_libraries): PRIVATE_USES_LIBRARIES := $(my_uses_libs_args)
   $(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(my_optional_uses_libs_args)
   $(my_enforced_uses_libraries): PRIVATE_DEXPREOPT_CONFIGS := $(my_dexpreopt_config_args)
@@ -272,12 +274,7 @@
 my_dexpreopt_images_deps :=
 my_dexpreopt_image_locations_on_host :=
 my_dexpreopt_image_locations_on_device :=
-# Infix can be 'boot' or 'art'. Soong creates a set of variables for Make, one
-# for each boot image (primary and the framework extension). The only reason why
-# the primary image is exposed to Make is testing (art gtests) and benchmarking
-# (art golem benchmarks). Install rules that use those variables are in
-# dex_preopt_libart.mk. Here for dexpreopt purposes the infix is always 'boot'.
-my_dexpreopt_infix := boot
+my_dexpreopt_infix := $(DEXPREOPT_INFIX)
 my_create_dexpreopt_config :=
 
 ifdef LOCAL_DEX_PREOPT
@@ -447,6 +444,7 @@
 
   my_dexpreopt_script := $(intermediates)/dexpreopt.sh
   my_dexpreopt_zip := $(intermediates)/dexpreopt.zip
+  DEXPREOPT.$(LOCAL_MODULE).POST_INSTALLED_DEXPREOPT_ZIP := $(my_dexpreopt_zip)
   .KATI_RESTAT: $(my_dexpreopt_script)
   $(my_dexpreopt_script): PRIVATE_MODULE := $(LOCAL_MODULE)
   $(my_dexpreopt_script): PRIVATE_GLOBAL_SOONG_CONFIG := $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)
@@ -461,7 +459,8 @@
 	-global $(PRIVATE_GLOBAL_CONFIG) \
 	-module $(PRIVATE_MODULE_CONFIG) \
 	-dexpreopt_script $@ \
-	-out_dir $(OUT_DIR)
+	-out_dir $(OUT_DIR) \
+	-product_packages $(PRODUCT_OUT)/product_packages.txt
 
   my_dexpreopt_deps := $(my_dex_jar)
   my_dexpreopt_deps += $(if $(my_process_profile),$(LOCAL_DEX_PREOPT_PROFILE))
@@ -471,7 +470,7 @@
   my_dexpreopt_deps += $(my_dexpreopt_images_deps)
   my_dexpreopt_deps += $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
   ifeq ($(LOCAL_ENFORCE_USES_LIBRARIES),true)
-    my_dexpreopt_deps += $(intermediates.COMMON)/enforce_uses_libraries.status
+    my_dexpreopt_deps += $(intermediates)/enforce_uses_libraries.status
   endif
 
   $(my_dexpreopt_zip): PRIVATE_MODULE := $(LOCAL_MODULE)
@@ -506,4 +505,4 @@
   my_dexpreopt_zip :=
   my_dexpreopt_config_for_postprocessing :=
 endif # LOCAL_DEX_PREOPT
-endif # my_create_dexpreopt_config
\ No newline at end of file
+endif # my_create_dexpreopt_config
diff --git a/core/distdir.mk b/core/distdir.mk
index bce8e7f..032d1b7 100644
--- a/core/distdir.mk
+++ b/core/distdir.mk
@@ -45,6 +45,18 @@
     $(eval _all_dist_goal_output_pairs += $$(goal):$$(dst))))
 endef
 
+define add_file_name_tag_suffix
+$(basename $(notdir $1))-FILE_NAME_TAG_PLACEHOLDER$(suffix $1)
+endef
+
+# This function inserts the suffix -FILE_NAME_TAG_PLACEHOLDER before the extension of the input file name
+# $(1): a list of goals  (e.g. droid, sdk, ndk). These must be PHONY
+# $(2): the dist files to add to those goals.
+define dist-for-goals-with-filenametag
+$(if $(strip $(2)), \
+  $(foreach file,$(2), \
+    $(call dist-for-goals,$(1),$(file):$(call add_file_name_tag_suffix,$(file)))))
+endef
 .PHONY: shareprojects
 
 define __share-projects-rule
@@ -209,4 +221,4 @@
           fi))
 endef
 
-.KATI_READONLY := dist-for-goals dist-write-file
+.KATI_READONLY := dist-for-goals dist-write-file dist-for-goals-with-filenametag
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 7dd9b12..f5a2022 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -24,14 +24,35 @@
 #$(warning $(call find_and_earlier,A B C,C))
 #$(warning $(call find_and_earlier,A B C,D))
 
-define version-list
-$(1)P1A $(1)P1B $(1)P2A $(1)P2B $(1)D1A $(1)D1B $(1)D2A $(1)D2B $(1)Q1A $(1)Q1B $(1)Q2A $(1)Q2B $(1)Q3A $(1)Q3B
+# Runs a starlark file, and sets all the variables in its top-level
+# variables_to_export_to_make variable as make variables.
+#
+# In order to avoid running starlark every time the stamp file is checked, we use
+# $(KATI_shell_no_rerun). Then, to make sure that we actually do rerun kati when
+# modifying the starlark files, we add the starlark files to the kati stamp file with
+# $(KATI_extra_file_deps).
+#
+# Arguments:
+#  $(1): A single starlark file to use as the entrypoint
+#  $(2): An optional list of starlark files to NOT include as kati dependencies.
+#  $(3): An optional list of extra flags to pass to rbcrun
+define run-starlark
+$(eval _starlark_results := $(OUT_DIR)/starlark_results/$(subst /,_,$(1)).mk)
+$(KATI_shell_no_rerun mkdir -p $(OUT_DIR)/starlark_results && $(OUT_DIR)/rbcrun --mode=make $(3) $(1) >$(_starlark_results) && touch -t 200001010000 $(_starlark_results))
+$(if $(filter-out 0,$(.SHELLSTATUS)),$(error Starlark failed to run))
+$(eval include $(_starlark_results))
+$(KATI_extra_file_deps $(filter-out $(2),$(LOADED_STARLARK_FILES)))
+$(eval LOADED_STARLARK_FILES :=)
+$(eval _starlark_results :=)
 endef
 
-PREV_VERSIONS := OPR1 OPD1 OPD2 OPM1 OPM2 PPR1 PPD1 PPD2 PPM1 PPM2 QPR1
-ALL_VERSIONS := Q R S T U V W X Y Z
-ALL_VERSIONS := $(PREV_VERSIONS) $(foreach v,$(ALL_VERSIONS),$(call version-list,$(v)))
-PREV_VERSIONS :=
+# ---------------------------------------------------------------
+# Release config
+include $(BUILD_SYSTEM)/release_config.mk
+
+# ---------------------------------------------------------------
+# defines ALL_VERSIONS
+$(call run-starlark,build/make/core/all_versions.bzl)
 
 # Filters ALL_VERSIONS down to the range [$1, $2], and errors if $1 > $2 or $3 is
 # not in [$1, $2]
@@ -45,7 +66,7 @@
   $(if $(filter $(ALL_VERSIONS),$(2)),,
     $(error Invalid MAX_PLATFORM_VERSION '$(2)'))
   $(if $(filter $(ALL_VERSIONS),$(3)),,
-    $(error Invalid DEFAULT_PLATFORM_VERSION '$(3)'))
+    $(error Invalid RELEASE_PLATFORM_VERSION '$(3)'))
 
   $(eval allowed_versions_ := $(call find_and_earlier,$(ALL_VERSIONS),$(2)))
 
@@ -56,7 +77,7 @@
     $(filter-out $(call find_and_earlier,$(allowed_versions_),$(1)),$(allowed_versions_)))
 
   $(if $(filter $(allowed_versions_),$(3)),,
-    $(error DEFAULT_PLATFORM_VERSION '$(3)' must be between MIN_PLATFORM_VERSION '$(1)' and MAX_PLATFORM_VERSION '$(2)'))
+    $(error RELEASE_PLATFORM_VERSION '$(3)' must be between MIN_PLATFORM_VERSION '$(1)' and MAX_PLATFORM_VERSION '$(2)'))
 
   $(allowed_versions_))
 endef
@@ -339,6 +360,7 @@
   RBC_PRODUCT_CONFIG \
   RBC_BOARD_CONFIG \
   SOONG_% \
+  TARGET_RELEASE \
   TOPDIR \
   TRACE_BEGIN_SOONG \
   USER)
@@ -553,6 +575,8 @@
 TARGET_OUT_NOTICE_FILES := $(TARGET_OUT_INTERMEDIATES)/NOTICE_FILES
 TARGET_OUT_FAKE := $(PRODUCT_OUT)/fake_packages
 TARGET_OUT_TESTCASES := $(PRODUCT_OUT)/testcases
+TARGET_OUT_FLAGS := $(TARGET_OUT_INTERMEDIATES)/FLAGS
+
 .KATI_READONLY := \
   TARGET_OUT_EXECUTABLES \
   TARGET_OUT_OPTIONAL_EXECUTABLES \
@@ -566,7 +590,8 @@
   TARGET_OUT_ETC \
   TARGET_OUT_NOTICE_FILES \
   TARGET_OUT_FAKE \
-  TARGET_OUT_TESTCASES
+  TARGET_OUT_TESTCASES \
+  TARGET_OUT_FLAGS
 
 ifeq ($(SANITIZE_LITE),true)
 # When using SANITIZE_LITE, APKs must not be packaged with sanitized libraries, as they will not
diff --git a/core/generate_enforce_rro.mk b/core/generate_enforce_rro.mk
index 9079981..e149ef4 100644
--- a/core/generate_enforce_rro.mk
+++ b/core/generate_enforce_rro.mk
@@ -1,6 +1,6 @@
 include $(CLEAR_VARS)
 
-enforce_rro_module := $(enforce_rro_source_module)__auto_generated_rro_$(enforce_rro_partition)
+enforce_rro_module := $(enforce_rro_source_module)__$(PRODUCT_NAME)__auto_generated_rro_$(enforce_rro_partition)
 LOCAL_PACKAGE_NAME := $(enforce_rro_module)
 
 intermediates := $(call intermediates-dir-for,APPS,$(LOCAL_PACKAGE_NAME),,COMMON)
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index 89aa53c..d45da48 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -98,9 +98,7 @@
 $(full_classes_combined_jar): $(full_classes_compiled_jar) \
                               $(jar_manifest_file) \
                               $(full_static_java_libs) | $(MERGE_ZIPS)
-	$(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
-            $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
-	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
+	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(PRIVATE_JAR_MANIFEST)) \
             $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
             $@ $< $(PRIVATE_STATIC_JAVA_LIBRARIES)
 
diff --git a/core/instrumentation_test_config_template.xml b/core/instrumentation_test_config_template.xml
index 6ca964e..379126c 100644
--- a/core/instrumentation_test_config_template.xml
+++ b/core/instrumentation_test_config_template.xml
@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2017 The Android Open Source Project
+<!-- Copyright (C) 2023 The Android Open Source Project
 
      Licensed under the Apache License, Version 2.0 (the "License");
      you may not use this file except in compliance with the License.
@@ -24,7 +24,7 @@
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.{TEST_TYPE}" >
-        <option name="package" value="{PACKAGE}" />
+        {EXTRA_TEST_RUNNER_CONFIGS}<option name="package" value="{PACKAGE}" />
         <option name="runner" value="{RUNNER}" />
     </test>
 </configuration>
diff --git a/core/java.mk b/core/java.mk
index b13ef4d..842fcbf 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -296,9 +296,7 @@
 $(full_classes_combined_jar): $(full_classes_compiled_jar) \
                               $(jar_manifest_file) \
                               $(full_static_java_libs) | $(MERGE_ZIPS)
-	$(if $(PRIVATE_JAR_MANIFEST), $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
-            $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf)
-	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(dir $@)/manifest.mf) \
+	$(MERGE_ZIPS) -j --ignore-duplicates $(if $(PRIVATE_JAR_MANIFEST),-m $(PRIVATE_JAR_MANIFEST)) \
             $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,-stripDir META-INF -zipToNotStrip $<) \
             $@ $< $(PRIVATE_STATIC_JAVA_LIBRARIES)
 
diff --git a/core/java_common.mk b/core/java_common.mk
index 5981b60..0e03d0b 100644
--- a/core/java_common.mk
+++ b/core/java_common.mk
@@ -296,16 +296,16 @@
       # Note: the lib naming scheme must be kept in sync with build/soong/java/sdk_library.go.
       sdk_lib_suffix = $(call pretty-error,sdk_lib_suffix was not set correctly)
       ifeq (current,$(LOCAL_SDK_VERSION))
-        sdk_module := android_stubs_current
+        sdk_module := $(ANDROID_PUBLIC_STUBS)
         sdk_lib_suffix := .stubs
       else ifeq (system_current,$(LOCAL_SDK_VERSION))
-        sdk_module := android_system_stubs_current
+        sdk_module := $(ANDROID_SYSTEM_STUBS)
         sdk_lib_suffix := .stubs.system
       else ifeq (test_current,$(LOCAL_SDK_VERSION))
-        sdk_module := android_test_stubs_current
+        sdk_module := $(ANDROID_TEST_STUBS)
         sdk_lib_suffix := .stubs.test
       else ifeq (core_current,$(LOCAL_SDK_VERSION))
-        sdk_module := core.current.stubs
+        sdk_module := $(ANDROID_CORE_STUBS)
         sdk_lib_suffix = $(call pretty-error,LOCAL_SDK_LIBRARIES not supported for LOCAL_SDK_VERSION = core_current)
       endif
       sdk_libs := $(foreach lib_name,$(LOCAL_SDK_LIBRARIES),$(lib_name)$(sdk_lib_suffix))
diff --git a/core/java_host_test_config_template.xml b/core/java_host_test_config_template.xml
index 26c1caf..e123dc7 100644
--- a/core/java_host_test_config_template.xml
+++ b/core/java_host_test_config_template.xml
@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2018 The Android Open Source Project
+<!-- Copyright (C) 2023 The Android Open Source Project
 
      Licensed under the Apache License, Version 2.0 (the "License");
      you may not use this file except in compliance with the License.
@@ -21,6 +21,6 @@
     {EXTRA_CONFIGS}
 
     <test class="com.android.tradefed.testtype.HostTest" >
-        <option name="jar" value="{MODULE}.jar" />
+        {EXTRA_TEST_RUNNER_CONFIGS}<option name="jar" value="{MODULE}.jar" />
     </test>
 </configuration>
diff --git a/core/local_vndk.mk b/core/local_vndk.mk
index befbc59..eb8f2c0 100644
--- a/core/local_vndk.mk
+++ b/core/local_vndk.mk
@@ -37,12 +37,5 @@
     $(shell echo $(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): LOCAL_USE_VNDK must not be used with LOCAL_SDK_VERSION >&2)
     $(error done)
   endif
-
-  # If we're not using the VNDK, drop all restrictions
-  ifndef BOARD_VNDK_VERSION
-    LOCAL_USE_VNDK:=
-    LOCAL_USE_VNDK_VENDOR:=
-    LOCAL_USE_VNDK_PRODUCT:=
-  endif
 endif
 
diff --git a/core/main.mk b/core/main.mk
index 3866037..40e690d 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -40,31 +40,23 @@
 # Write the build number to a file so it can be read back in
 # without changing the command line every time.  Avoids rebuilds
 # when using ninja.
-$(shell mkdir -p $(SOONG_OUT_DIR) && \
-    echo -n $(BUILD_NUMBER) > $(SOONG_OUT_DIR)/build_number.tmp; \
-    if ! cmp -s $(SOONG_OUT_DIR)/build_number.tmp $(SOONG_OUT_DIR)/build_number.txt; then \
-        mv $(SOONG_OUT_DIR)/build_number.tmp $(SOONG_OUT_DIR)/build_number.txt; \
-    else \
-        rm $(SOONG_OUT_DIR)/build_number.tmp; \
-    fi)
 BUILD_NUMBER_FILE := $(SOONG_OUT_DIR)/build_number.txt
-.KATI_READONLY := BUILD_NUMBER_FILE
 $(KATI_obsolete_var BUILD_NUMBER,See https://android.googlesource.com/platform/build/+/master/Changes.md#BUILD_NUMBER)
+BUILD_HOSTNAME_FILE := $(SOONG_OUT_DIR)/build_hostname.txt
+$(KATI_obsolete_var BUILD_HOSTNAME,Use BUILD_HOSTNAME_FROM_FILE instead)
+$(KATI_obsolete_var FILE_NAME_TAG,https://android.googlesource.com/platform/build/+/master/Changes.md#FILE_NAME_TAG)
+
 $(BUILD_NUMBER_FILE):
-	touch $@
+	# empty rule to prevent dangling rule error for a file that is written by soong_ui
+$(BUILD_HOSTNAME_FILE):
+	# empty rule to prevent dangling rule error for a file that is written by soong_ui
+
+.KATI_RESTAT: $(BUILD_NUMBER_FILE)
+.KATI_RESTAT: $(BUILD_HOSTNAME_FILE)
 
 DATE_FROM_FILE := date -d @$(BUILD_DATETIME_FROM_FILE)
 .KATI_READONLY := DATE_FROM_FILE
 
-# Pick a reasonable string to use to identify files.
-ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
-  # BUILD_NUMBER has a timestamp in it, which means that
-  # it will change every time.  Pick a stable value.
-  FILE_NAME_TAG := eng.$(BUILD_USERNAME)
-else
-  FILE_NAME_TAG := $(file <$(BUILD_NUMBER_FILE))
-endif
-.KATI_READONLY := FILE_NAME_TAG
 
 # Make an empty directory, which can be used to make empty jars
 EMPTY_DIRECTORY := $(OUT_DIR)/empty
@@ -72,8 +64,6 @@
 
 # CTS-specific config.
 -include cts/build/config.mk
-# VTS-specific config.
--include test/vts/tools/vts-tradefed/build/config.mk
 # device-tests-specific-config.
 -include tools/tradefederation/build/suites/device-tests/config.mk
 # general-tests-specific-config.
@@ -190,9 +180,7 @@
 ADDITIONAL_SYSTEM_PROPERTIES += ro.treble.enabled=${PRODUCT_FULL_TREBLE}
 
 $(KATI_obsolete_var PRODUCT_FULL_TREBLE,\
-	Code should be written to work regardless of a device being Treble or \
-	variables like PRODUCT_SEPOLICY_SPLIT should be used until that is \
-	possible.)
+	Code should be written to work regardless of a device being Treble)
 
 # Sets ro.actionable_compatible_property.enabled to know on runtime whether the
 # allowed list of actionable compatible properties is enabled or not.
@@ -347,8 +335,13 @@
 
 ifeq ($(AB_OTA_UPDATER),true)
 ADDITIONAL_PRODUCT_PROPERTIES += ro.product.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
+ADDITIONAL_VENDOR_PROPERTIES += ro.vendor.build.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
 endif
 
+# Set this property for VTS to skip large page size tests on unsupported devices.
+ADDITIONAL_PRODUCT_PROPERTIES += \
+    ro.product.cpu.pagesize.max=$(TARGET_MAX_PAGE_SIZE_SUPPORTED)
+
 # -----------------------------------------------------------------
 ###
 ### In this section we set up the things that are different
@@ -764,6 +757,9 @@
     $(info $(word 1,$(r)) module $(word 2,$(r)) requires non-existent $(word 3,$(r)) module: $(word 4,$(r))) \
   )
   $(warning Set BUILD_BROKEN_MISSING_REQUIRED_MODULES := true to bypass this check if this is intentional)
+  ifneq (,$(PRODUCT_SOURCE_ROOT_DIRS))
+    $(warning PRODUCT_SOURCE_ROOT_DIRS is non-empty. Some necessary modules may have been skipped by Soong)
+  endif
   $(error Build failed)
 endif # _nonexistent_required != empty
 endif # check_missing_required_modules == true
@@ -1251,6 +1247,7 @@
     $(if $(filter tests,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_TESTS)) \
     $(if $(filter asan,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_ASAN)) \
     $(if $(filter java_coverage,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE)) \
+    $(if $(filter arm64,$(TARGET_ARCH) $(TARGET_2ND_ARCH)),$(call get-product-var,$(1),PRODUCT_PACKAGES_ARM64)) \
     $(call auto-included-modules) \
   ) \
   $(eval ### Filter out the overridden packages and executables before doing expansion) \
@@ -1345,6 +1342,13 @@
                   $(if $(ALL_MODULES.$(m).INSTALLED),\
                     $(if $(filter-out $(HOST_OUT_ROOT)/%,$(ALL_MODULES.$(m).INSTALLED)),,\
                       $(m))))
+    ifeq ($(TARGET_ARCH),riscv64)
+      # HACK: riscv64 can't build the device version of bcc and ld.mc due to a
+      # dependency on an old version of LLVM, but they are listed in
+      # base_system.mk which can't add them conditionally based on the target
+      # architecture.
+      _host_modules := $(filter-out bcc ld.mc,$(_host_modules))
+    endif
     $(call maybe-print-list-and-error,$(sort $(_host_modules)),\
       Host modules should be in PRODUCT_HOST_PACKAGES$(comma) not PRODUCT_PACKAGES)
   endif
@@ -1377,30 +1381,7 @@
     $(CUSTOM_MODULES) \
   )
 
-ifdef FULL_BUILD
-#
-# Used by the cleanup logic in soong_ui to remove files that should no longer
-# be installed.
-#
-
-# Include all tests, so that we remove them from the test suites / testcase
-# folders when they are removed.
-test_files := $(foreach ts,$(ALL_COMPATIBILITY_SUITES),$(COMPATIBILITY.$(ts).FILES))
-
-$(shell mkdir -p $(PRODUCT_OUT) $(HOST_OUT))
-
-$(file >$(PRODUCT_OUT)/.installable_files$(if $(filter address,$(SANITIZE_TARGET)),_asan), \
-  $(sort $(patsubst $(PRODUCT_OUT)/%,%,$(filter $(PRODUCT_OUT)/%, \
-    $(modules_to_install) $(test_files)))))
-
-$(file >$(HOST_OUT)/.installable_test_files,$(sort \
-  $(patsubst $(HOST_OUT)/%,%,$(filter $(HOST_OUT)/%, \
-    $(test_files)))))
-
-test_files :=
-endif
-
-# Dedpulicate compatibility suite dist files across modules and packages before
+# Deduplicate compatibility suite dist files across modules and packages before
 # copying them to their requested locations. Assign the eval result to an unused
 # var to prevent Make from trying to make a sense of it.
 _unused := $(call copy-many-files, $(sort $(ALL_COMPATIBILITY_DIST_FILES)))
@@ -1458,6 +1439,28 @@
 modules_to_install := $(sort $(ALL_DEFAULT_INSTALLED_MODULES))
 ALL_DEFAULT_INSTALLED_MODULES :=
 
+ifdef FULL_BUILD
+#
+# Used by the cleanup logic in soong_ui to remove files that should no longer
+# be installed.
+#
+
+# Include all tests, so that we remove them from the test suites / testcase
+# folders when they are removed.
+test_files := $(foreach ts,$(ALL_COMPATIBILITY_SUITES),$(COMPATIBILITY.$(ts).FILES))
+
+$(shell mkdir -p $(PRODUCT_OUT) $(HOST_OUT))
+
+$(file >$(PRODUCT_OUT)/.installable_files$(if $(filter address,$(SANITIZE_TARGET)),_asan), \
+  $(sort $(patsubst $(PRODUCT_OUT)/%,%,$(filter $(PRODUCT_OUT)/%, \
+    $(modules_to_install) $(test_files)))))
+
+$(file >$(HOST_OUT)/.installable_test_files,$(sort \
+  $(patsubst $(HOST_OUT)/%,%,$(filter $(HOST_OUT)/%, \
+    $(test_files)))))
+
+test_files :=
+endif
 
 # Some notice deps refer to module names without prefix or arch suffix where
 # only the variants with them get built.
@@ -1739,15 +1742,15 @@
   endif
 
   $(PROGUARD_DICT_ZIP) : $(apps_only_installed_files)
-  $(call dist-for-goals,apps_only, $(PROGUARD_DICT_ZIP) $(PROGUARD_DICT_MAPPING))
+  $(call dist-for-goals-with-filenametag,apps_only, $(PROGUARD_DICT_ZIP) $(PROGUARD_DICT_MAPPING))
   $(call declare-container-license-deps,$(PROGUARD_DICT_ZIP),$(apps_only_installed_files),$(PRODUCT_OUT)/:/)
 
   $(PROGUARD_USAGE_ZIP) : $(apps_only_installed_files)
-  $(call dist-for-goals,apps_only, $(PROGUARD_USAGE_ZIP))
+  $(call dist-for-goals-with-filenametag,apps_only, $(PROGUARD_USAGE_ZIP))
   $(call declare-container-license-deps,$(PROGUARD_USAGE_ZIP),$(apps_only_installed_files),$(PRODUCT_OUT)/:/)
 
   $(SYMBOLS_ZIP) : $(apps_only_installed_files)
-  $(call dist-for-goals,apps_only, $(SYMBOLS_ZIP) $(SYMBOLS_MAPPING))
+  $(call dist-for-goals-with-filenametag,apps_only, $(SYMBOLS_ZIP) $(SYMBOLS_MAPPING))
   $(call declare-container-license-deps,$(SYMBOLS_ZIP),$(apps_only_installed_files),$(PRODUCT_OUT)/:/)
 
   $(COVERAGE_ZIP) : $(apps_only_installed_files)
@@ -1793,17 +1796,23 @@
   # avoid disting targets that would cause building framework java sources,
   # which we want to avoid in an unbundled build.
 
-  $(call dist-for-goals, droidcore-unbundled, \
+  $(call dist-for-goals-with-filenametag, droidcore-unbundled, \
     $(INTERNAL_UPDATE_PACKAGE_TARGET) \
     $(INTERNAL_OTA_PACKAGE_TARGET) \
-    $(INTERNAL_OTA_METADATA) \
     $(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET) \
+    $(BUILT_RAMDISK_16K_TARGET) \
+    $(BUILT_KERNEL_16K_TARGET) \
     $(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET) \
     $(SYMBOLS_ZIP) \
     $(SYMBOLS_MAPPING) \
     $(PROGUARD_DICT_ZIP) \
     $(PROGUARD_DICT_MAPPING) \
     $(PROGUARD_USAGE_ZIP) \
+    $(BUILT_TARGET_FILES_PACKAGE) \
+  )
+
+  $(call dist-for-goals, droidcore-unbundled, \
+    $(INTERNAL_OTA_METADATA) \
     $(COVERAGE_ZIP) \
     $(INSTALLED_FILES_FILE) \
     $(INSTALLED_FILES_JSON) \
@@ -1831,7 +1840,6 @@
     $(INSTALLED_ODM_BUILD_PROP_TARGET):build.prop-odm \
     $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET):build.prop-system_ext \
     $(INSTALLED_RAMDISK_BUILD_PROP_TARGET):build.prop-ramdisk \
-    $(BUILT_TARGET_FILES_PACKAGE) \
     $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
     $(INSTALLED_MISC_INFO_TARGET) \
     $(INSTALLED_RAMDISK_TARGET) \
@@ -1843,7 +1851,7 @@
     $(call dist-for-goals, droidcore-unbundled, $(f)))
 
   ifneq ($(ANDROID_BUILD_EMBEDDED),true)
-    $(call dist-for-goals, droidcore, \
+    $(call dist-for-goals-with-filenametag, droidcore, \
       $(APPS_ZIP) \
       $(INTERNAL_EMULATOR_PACKAGE_TARGET) \
     )
@@ -1892,11 +1900,11 @@
   endif
 
   # Put XML formatted API files in the dist dir.
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/api.xml: $(call java-lib-files,android_stubs_current) $(APICHECK)
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/system-api.xml: $(call java-lib-files,android_system_stubs_current) $(APICHECK)
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/module-lib-api.xml: $(call java-lib-files,android_module_lib_stubs_current) $(APICHECK)
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/system-server-api.xml: $(call java-lib-files,android_system_server_stubs_current) $(APICHECK)
-  $(TARGET_OUT_COMMON_INTERMEDIATES)/test-api.xml: $(call java-lib-files,android_test_stubs_current) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/api.xml: $(call java-lib-files,$(ANDROID_PUBLIC_STUBS)) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/system-api.xml: $(call java-lib-files,$(ANDROID_SYSTEM_STUBS)) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/module-lib-api.xml: $(call java-lib-files,$(ANDROID_MODULE_LIB_STUBS)) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/system-server-api.xml: $(call java-lib-files,$(ANDROID_SYSTEM_SERVER_STUBS)) $(APICHECK)
+  $(TARGET_OUT_COMMON_INTERMEDIATES)/test-api.xml: $(call java-lib-files,$(ANDROID_TEST_STUBS)) $(APICHECK)
 
   api_xmls := $(addprefix $(TARGET_OUT_COMMON_INTERMEDIATES)/,api.xml system-api.xml module-lib-api.xml system-server-api.xml test-api.xml)
   $(api_xmls):
@@ -1943,10 +1951,8 @@
 ifeq ($(HOST_OS),linux)
 ALL_SDK_TARGETS := $(INTERNAL_SDK_TARGET)
 sdk: $(ALL_SDK_TARGETS)
-$(call dist-for-goals,sdk, \
-    $(ALL_SDK_TARGETS) \
-    $(INSTALLED_BUILD_PROP_TARGET) \
-)
+$(call dist-for-goals-with-filenametag,sdk,$(ALL_SDK_TARGETS))
+$(call dist-for-goals,sdk,$(INSTALLED_BUILD_PROP_TARGET))
 endif
 
 # umbrella targets to assit engineers in verifying builds
@@ -2019,6 +2025,191 @@
 # missing dependency errors.
 $(call build-license-metadata)
 
+# Generate SBOM in SPDX format
+product_copy_files_without_owner := $(foreach pcf,$(PRODUCT_COPY_FILES),$(call word-colon,1,$(pcf)):$(call word-colon,2,$(pcf)))
+ifeq ($(TARGET_BUILD_APPS),)
+dest_files_without_source := $(sort $(foreach pcf,$(product_copy_files_without_owner),$(if $(wildcard $(call word-colon,1,$(pcf))),,$(call word-colon,2,$(pcf)))))
+dest_files_without_source := $(addprefix $(PRODUCT_OUT)/,$(dest_files_without_source))
+filter_out_files := \
+  $(PRODUCT_OUT)/apex/% \
+  $(PRODUCT_OUT)/fake_packages/% \
+  $(PRODUCT_OUT)/testcases/% \
+  $(dest_files_without_source)
+# Check if each partition image is built, if not filter out all its installed files
+# Also check if a partition uses prebuilt image file, save the info if prebuilt image is used.
+PREBUILT_PARTITION_COPY_FILES :=
+# product.img
+ifndef BUILDING_PRODUCT_IMAGE
+filter_out_files += $(PRODUCT_OUT)/product/%
+ifdef BOARD_PREBUILT_PRODUCTIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_PRODUCTIMAGE):$(INSTALLED_PRODUCTIMAGE_TARGET)
+endif
+endif
+
+# system.img
+ifndef BUILDING_SYSTEM_IMAGE
+filter_out_files += $(PRODUCT_OUT)/system/%
+endif
+# system_dlkm.img
+ifndef BUILDING_SYSTEM_DLKM_IMAGE
+filter_out_files += $(PRODUCT_OUT)/system_dlkm/%
+ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_SYSTEM_DLKMIMAGE):$(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
+endif
+endif
+# system_ext.img
+ifndef BUILDING_SYSTEM_EXT_IMAGE
+filter_out_files += $(PRODUCT_OUT)/system_ext/%
+ifdef BOARD_PREBUILT_SYSTEM_EXTIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_SYSTEM_EXTIMAGE):$(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
+endif
+endif
+# system_other.img
+ifndef BUILDING_SYSTEM_OTHER_IMAGE
+filter_out_files += $(PRODUCT_OUT)/system_other/%
+endif
+
+# odm.img
+ifndef BUILDING_ODM_IMAGE
+filter_out_files += $(PRODUCT_OUT)/odm/%
+ifdef BOARD_PREBUILT_ODMIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_ODMIMAGE):$(INSTALLED_ODMIMAGE_TARGET)
+endif
+endif
+# odm_dlkm.img
+ifndef BUILDING_ODM_DLKM_IMAGE
+filter_out_files += $(PRODUCT_OUT)/odm_dlkm/%
+ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_ODM_DLKMIMAGE):$(INSTALLED_ODM_DLKMIMAGE_TARGET)
+endif
+endif
+
+# vendor.img
+ifndef BUILDING_VENDOR_IMAGE
+filter_out_files += $(PRODUCT_OUT)/vendor/%
+ifdef BOARD_PREBUILT_VENDORIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_VENDORIMAGE):$(INSTALLED_VENDORIMAGE_TARGET)
+endif
+endif
+# vendor_dlkm.img
+ifndef BUILDING_VENDOR_DLKM_IMAGE
+filter_out_files += $(PRODUCT_OUT)/vendor_dlkm/%
+ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_VENDOR_DLKMIMAGE):$(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
+endif
+endif
+
+# cache.img
+ifndef BUILDING_CACHE_IMAGE
+filter_out_files += $(PRODUCT_OUT)/cache/%
+endif
+
+# boot.img
+ifndef BUILDING_BOOT_IMAGE
+ifdef BOARD_PREBUILT_BOOTIMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_BOOTIMAGE):$(INSTALLED_BOOTIMAGE_TARGET)
+endif
+endif
+# init_boot.img
+ifndef BUILDING_INIT_BOOT_IMAGE
+ifdef BOARD_PREBUILT_INIT_BOOT_IMAGE
+PREBUILT_PARTITION_COPY_FILES += $(BOARD_PREBUILT_INIT_BOOT_IMAGE):$(INSTALLED_INIT_BOOT_IMAGE_TARGET)
+endif
+endif
+
+# ramdisk.img
+ifndef BUILDING_RAMDISK_IMAGE
+filter_out_files += $(PRODUCT_OUT)/ramdisk/%
+endif
+
+# recovery.img
+ifndef INSTALLED_RECOVERYIMAGE_TARGET
+filter_out_files += $(PRODUCT_OUT)/recovery/%
+endif
+
+installed_files := $(sort $(filter-out $(filter_out_files),$(filter $(PRODUCT_OUT)/%,$(modules_to_install))))
+else
+installed_files := $(apps_only_installed_files)
+endif  # TARGET_BUILD_APPS
+
+# sbom-metadata.csv contains all raw data collected in Make for generating SBOM in generate-sbom.py.
+# There are multiple columns and each identifies the source of an installed file for a specific case.
+# The columns and their uses are described as below:
+#   installed_file: the file path on device, e.g. /product/app/Browser2/Browser2.apk
+#   module_path: the path of the module that generates the installed file, e.g. packages/apps/Browser2
+#   soong_module_type: Soong module type, e.g. android_app, cc_binary
+#   is_prebuilt_make_module: Y, if the installed file is from a prebuilt Make module, see prebuilt_internal.mk
+#   product_copy_files: the installed file is from variable PRODUCT_COPY_FILES, e.g. device/google/cuttlefish/shared/config/init.product.rc:product/etc/init/init.rc
+#   kernel_module_copy_files: the installed file is from variable KERNEL_MODULE_COPY_FILES, similar to product_copy_files
+#   is_platform_generated: this is an aggregated value including some small cases instead of adding more columns. It is set to Y if any case is Y
+#       is_build_prop: build.prop in each partition, see sysprop.mk.
+#       is_notice_file: NOTICE.xml.gz in each partition, see Makefile.
+#       is_dexpreopt_image_profile: see the usage of DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED in Soong and Make
+#       is_product_system_other_avbkey: see INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET
+#       is_system_other_odex_marker: see INSTALLED_SYSTEM_OTHER_ODEX_MARKER
+#       is_event_log_tags_file: see variable event_log_tags_file in Makefile
+#       is_kernel_modules_blocklist: modules.blocklist created for _dlkm partitions, see macro build-image-kernel-modules-dir in Makefile.
+#       is_fsverity_build_manifest_apk: BuildManifest<part>.apk files for system and system_ext partition, see ALL_FSVERITY_BUILD_MANIFEST_APK in Makefile.
+#       is_linker_config: see SYSTEM_LINKER_CONFIG and vendor_linker_config_file in Makefile.
+
+# (TODO: b/272358583 find another way of always rebuilding this target)
+# Remove sbom-metadata.csv whenever the makefile is evaluated
+$(shell rm $(PRODUCT_OUT)/sbom-metadata.csv >/dev/null 2>&1)
+$(PRODUCT_OUT)/sbom-metadata.csv: $(installed_files)
+	rm -f $@
+	@echo installed_file$(comma)module_path$(comma)soong_module_type$(comma)is_prebuilt_make_module$(comma)product_copy_files$(comma)kernel_module_copy_files$(comma)is_platform_generated,build_output_path >> $@
+	$(foreach f,$(installed_files),\
+	  $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \
+	  $(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$f)) \
+	  $(eval _build_output_path := $(PRODUCT_OUT)/$(_path_on_device)) \
+	  $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
+	  $(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \
+	  $(eval _is_prebuilt_make_module := $(ALL_MODULES.$(_module_name).IS_PREBUILT_MAKE_MODULE)) \
+	  $(eval _post_installed_dexpreopt_zip := $(DEXPREOPT.$(_module_name).POST_INSTALLED_DEXPREOPT_ZIP)) \
+	  $(eval _product_copy_files := $(sort $(filter %:$(_path_on_device),$(product_copy_files_without_owner)))) \
+	  $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \
+	  $(eval _is_build_prop := $(call is-build-prop,$f)) \
+	  $(eval _is_notice_file := $(call is-notice-file,$f)) \
+	  $(eval _is_dexpreopt_image_profile := $(if $(filter %:/$(_path_on_device),$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED)),Y)) \
+	  $(eval _is_product_system_other_avbkey := $(if $(findstring $f,$(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET)),Y)) \
+	  $(eval _is_event_log_tags_file := $(if $(findstring $f,$(event_log_tags_file)),Y)) \
+	  $(eval _is_system_other_odex_marker := $(if $(findstring $f,$(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)),Y)) \
+	  $(eval _is_kernel_modules_blocklist := $(if $(findstring $f,$(ALL_KERNEL_MODULES_BLOCKLIST)),Y)) \
+	  $(eval _is_fsverity_build_manifest_apk := $(if $(findstring $f,$(ALL_FSVERITY_BUILD_MANIFEST_APK)),Y)) \
+	  $(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \
+	  $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \
+	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)) \
+	  @echo /$(_path_on_device)$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated)$(comma)$(_build_output_path) >> $@ $(newline) \
+	  $(if $(_post_installed_dexpreopt_zip), \
+	  for i in $$(zipinfo -1 $(_post_installed_dexpreopt_zip)); do echo /$$i$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated)$(comma)$(PRODUCT_OUT)/$$i >> $@ ; done $(newline) \
+	  ) \
+	)
+
+.PHONY: sbom
+ifeq ($(TARGET_BUILD_APPS),)
+sbom: $(PRODUCT_OUT)/sbom.spdx.json
+$(PRODUCT_OUT)/sbom.spdx.json: $(PRODUCT_OUT)/sbom.spdx
+$(PRODUCT_OUT)/sbom.spdx: $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
+	rm -rf $@
+	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --json
+
+$(call dist-for-goals,droid,$(PRODUCT_OUT)/sbom.spdx.json:sbom/sbom.spdx.json)
+else
+apps_only_sbom_files := $(sort $(patsubst %,%.spdx.json,$(filter %.apk,$(apps_only_installed_files))))
+$(apps_only_sbom_files): $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
+	rm -rf $@
+	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --unbundled_apk
+
+sbom: $(apps_only_sbom_files)
+
+$(foreach f,$(apps_only_sbom_files),$(eval $(patsubst %.spdx.json,%-fragment.spdx,$f): $f))
+apps_only_fragment_files := $(patsubst %.spdx.json,%-fragment.spdx,$(apps_only_sbom_files))
+$(foreach f,$(apps_only_fragment_files),$(eval apps_only_fragment_dist_files += :sbom/$(notdir $f)))
+
+$(foreach f,$(apps_only_sbom_files),$(eval apps_only_sbom_dist_files += :sbom/$(notdir $f)))
+$(call dist-for-goals,apps_only,$(join $(apps_only_sbom_files),$(apps_only_sbom_dist_files)) $(join $(apps_only_fragment_files),$(apps_only_fragment_dist_files)))
+endif
+
 $(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
 
 $(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] writing build rules ...)
diff --git a/core/native_test_config_template.xml b/core/native_test_config_template.xml
index ea982cf..788157c 100644
--- a/core/native_test_config_template.xml
+++ b/core/native_test_config_template.xml
@@ -1,5 +1,5 @@
 <?xml version="1.0" encoding="utf-8"?>
-<!-- Copyright (C) 2017 The Android Open Source Project
+<!-- Copyright (C) 2023 The Android Open Source Project
 
      Licensed under the Apache License, Version 2.0 (the "License");
      you may not use this file except in compliance with the License.
@@ -26,7 +26,7 @@
     </target_preparer>
 
     <test class="com.android.tradefed.testtype.GTest" >
-        <option name="native-test-device-path" value="{TEST_INSTALL_BASE}" />
+        {EXTRA_TEST_RUNNER_CONFIGS}<option name="native-test-device-path" value="{TEST_INSTALL_BASE}" />
         <option name="module-name" value="{MODULE}" />
     </test>
 </configuration>
diff --git a/core/os_licensing.mk b/core/os_licensing.mk
index db7c422..1e1b7df 100644
--- a/core/os_licensing.mk
+++ b/core/os_licensing.mk
@@ -21,8 +21,8 @@
 	$(copy-file-to-target)
 endif
 
-$(call declare-0p-target,$(target_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_notice_html_or_xml_gz))
+$(call declare-1p-target,$(target_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_notice_html_or_xml_gz))
 endif
 
 .PHONY: vendorlicense
@@ -43,8 +43,8 @@
 $(installed_vendor_notice_xml_gz): $(target_vendor_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_vendor_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_vendor_notice_xml_gz))
+$(call declare-1p-target,$(target_vendor_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_vendor_notice_xml_gz))
 endif
 
 .PHONY: odmlicense
@@ -62,8 +62,8 @@
 $(installed_odm_notice_xml_gz): $(target_odm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_odm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_odm_notice_xml_gz))
+$(call declare-1p-target,$(target_odm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_odm_notice_xml_gz))
 endif
 
 .PHONY: oemlicense
@@ -84,8 +84,8 @@
 $(installed_product_notice_xml_gz): $(target_product_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_product_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_product_notice_xml_gz))
+$(call declare-1p-target,$(target_product_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_product_notice_xml_gz))
 endif
 
 .PHONY: systemextlicense
@@ -103,8 +103,8 @@
 $(installed_system_ext_notice_xml_gz): $(target_system_ext_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_system_ext_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_system_ext_notice_xml_gz))
+$(call declare-1p-target,$(target_system_ext_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_system_ext_notice_xml_gz))
 endif
 
 .PHONY: vendor_dlkmlicense
@@ -122,8 +122,8 @@
 $(installed_vendor_dlkm_notice_xml_gz): $(target_vendor_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_vendor_dlkm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_vendor_dlkm_notice_xml_gz))
+$(call declare-1p-target,$(target_vendor_dlkm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_vendor_dlkm_notice_xml_gz))
 endif
 
 .PHONY: odm_dlkmlicense
@@ -141,8 +141,8 @@
 $(installed_odm_dlkm_notice_xml_gz): $(target_odm_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_odm_dlkm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_odm_dlkm_notice_xml_gz))
+$(call declare-1p-target,$(target_odm_dlkm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_odm_dlkm_notice_xml_gz))
 endif
 
 .PHONY: system_dlkmlicense
@@ -160,8 +160,8 @@
 $(installed_system_dlkm_notice_xml_gz): $(target_system_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_system_dlkm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_sysetm_dlkm_notice_xml_gz))
+$(call declare-1p-target,$(target_system_dlkm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_sysetm_dlkm_notice_xml_gz))
 endif
 
 endif # not TARGET_BUILD_APPS
diff --git a/core/package_internal.mk b/core/package_internal.mk
index c7a173b..7cfab5b 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -111,24 +111,26 @@
 
 # Determine whether auto-RRO is enabled for this package.
 enforce_rro_enabled :=
-ifneq (,$(filter *, $(PRODUCT_ENFORCE_RRO_TARGETS)))
-  # * means all system and system_ext APKs, so enable conditionally based on module path.
+ifeq (,$(filter tests,$(LOCAL_MODULE_TAGS)))
+  ifneq (,$(filter *, $(PRODUCT_ENFORCE_RRO_TARGETS)))
+    # * means all system and system_ext APKs, so enable conditionally based on module path.
 
-  # Note that base_rules.mk has not yet been included, so it's likely that only
-  # one of LOCAL_MODULE_PATH and the LOCAL_X_MODULE flags has been set.
-  ifeq (,$(LOCAL_MODULE_PATH))
-    non_rro_target_module := $(filter true,\
-        $(LOCAL_ODM_MODULE) \
-        $(LOCAL_OEM_MODULE) \
-        $(LOCAL_PRODUCT_MODULE) \
-        $(LOCAL_PROPRIETARY_MODULE) \
-        $(LOCAL_VENDOR_MODULE))
-    enforce_rro_enabled := $(if $(non_rro_target_module),,true)
-  else ifneq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
+    # Note that base_rules.mk has not yet been included, so it's likely that only
+    # one of LOCAL_MODULE_PATH and the LOCAL_X_MODULE flags has been set.
+    ifeq (,$(LOCAL_MODULE_PATH))
+      non_rro_target_module := $(filter true,\
+          $(LOCAL_ODM_MODULE) \
+          $(LOCAL_OEM_MODULE) \
+          $(LOCAL_PRODUCT_MODULE) \
+          $(LOCAL_PROPRIETARY_MODULE) \
+          $(LOCAL_VENDOR_MODULE))
+      enforce_rro_enabled := $(if $(non_rro_target_module),,true)
+    else ifneq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
+      enforce_rro_enabled := true
+    endif
+  else ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_TARGETS)))
     enforce_rro_enabled := true
   endif
-else ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_TARGETS)))
-  enforce_rro_enabled := true
 endif
 
 product_package_overlays := $(strip \
@@ -201,10 +203,10 @@
 all_resources := $(strip $(my_res_resources) $(my_overlay_resources))
 
 # The linked resource package.
-my_res_package := $(intermediates)/package-res.apk
+my_res_package := $(intermediates.COMMON)/package-res.apk
 LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
 
-my_bundle_module := $(intermediates)/base.zip
+my_bundle_module := $(intermediates.COMMON)/base.zip
 LOCAL_INTERMEDIATE_TARGETS += $(my_bundle_module)
 
 # Always run aapt2, because we need to at least compile the AndroidManifest.xml.
@@ -570,7 +572,7 @@
 	$(compress-package)
 endif  # LOCAL_COMPRESSED_MODULE
 
-my_package_res_pb := $(intermediates)/package-res.pb.apk
+my_package_res_pb := $(intermediates.COMMON)/package-res.pb.apk
 $(my_package_res_pb): $(my_res_package) $(AAPT2)
 	$(AAPT2) convert --output-format proto $< -o $@
 
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index ef1471d..5bea9b6 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -57,6 +57,9 @@
   $(error $(LOCAL_MODULE) : unexpected LOCAL_MODULE_CLASS for prebuilts: $(LOCAL_MODULE_CLASS))
 endif
 
+$(if $(filter-out $(SOONG_ANDROID_MK),$(LOCAL_MODULE_MAKEFILE)), \
+  $(eval ALL_MODULES.$(my_register_name).IS_PREBUILT_MAKE_MODULE := Y))
+
 $(built_module) : $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
 my_prebuilt_src_file :=
diff --git a/core/product.mk b/core/product.mk
index f4d5a4f..6f54b78 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -27,7 +27,13 @@
 _product_single_value_vars += PRODUCT_NAME_FOR_ATTESTATION
 _product_single_value_vars += PRODUCT_MODEL_FOR_ATTESTATION
 
-# The resoure configuration options to use for this product.
+# Defines the ELF segment alignment for binaries (executables and shared libraries).
+# The ELF segment alignment has to be a PAGE_SIZE multiple. For example, if
+# PRODUCT_MAX_PAGE_SIZE_SUPPORTED=65536, the possible values for PAGE_SIZE could be
+# 4096, 16384 and 65536.
+_product_single_value_vars += PRODUCT_MAX_PAGE_SIZE_SUPPORTED
+
+# The resource configuration options to use for this product.
 _product_list_vars += PRODUCT_LOCALES
 _product_list_vars += PRODUCT_AAPT_CONFIG
 _product_single_value_vars += PRODUCT_AAPT_PREF_CONFIG
@@ -36,6 +42,7 @@
 _product_list_vars += PRODUCT_PACKAGES
 _product_list_vars += PRODUCT_PACKAGES_DEBUG
 _product_list_vars += PRODUCT_PACKAGES_DEBUG_ASAN
+_product_list_vars += PRODUCT_PACKAGES_ARM64
 # Packages included only for eng/userdebug builds, when building with EMMA_INSTRUMENT=true
 _product_list_vars += PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE
 _product_list_vars += PRODUCT_PACKAGES_ENG
@@ -237,6 +244,9 @@
 # Whether any paths are excluded from sanitization when SANITIZE_TARGET=cfi
 _product_list_vars += PRODUCT_CFI_EXCLUDE_PATHS
 
+# Whether any paths should have HWASan enabled for components
+_product_list_vars += PRODUCT_HWASAN_INCLUDE_PATHS
+
 # Whether the Scudo hardened allocator is disabled platform-wide
 _product_single_value_vars += PRODUCT_DISABLE_SCUDO
 
@@ -269,6 +279,9 @@
 # List of tags that will be used to gate blueprint modules from the build graph
 _product_list_vars += PRODUCT_INCLUDE_TAGS
 
+# List of directories that will be used to gate blueprint modules from the build graph
+_product_list_vars += PRODUCT_SOURCE_ROOT_DIRS
+
 # When this is true, various build time as well as runtime debugfs restrictions are enabled.
 _product_single_value_vars += PRODUCT_SET_DEBUGFS_RESTRICTIONS
 
@@ -376,6 +389,9 @@
 # If true, installs a full version of com.android.virt APEX.
 _product_single_value_vars += PRODUCT_AVF_ENABLED
 
+# If true, kernel with modules will be used for Microdroid VMs.
+_product_single_value_vars += PRODUCT_AVF_KERNEL_MODULES_ENABLED
+
 # List of .json files to be merged/compiled into vendor/etc/linker.config.pb
 _product_list_vars += PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS
 
@@ -391,6 +407,12 @@
 #   supports it
 _product_single_value_vars += PRODUCT_ENABLE_UFFD_GC
 
+# Specifies COW version to be used by update_engine and libsnapshot. If this value is not
+# specified we default to COW version 2 in update_engine for backwards compatibility
+_product_single_value_vars += PRODUCT_VIRTUAL_AB_COW_VERSION
+
+_product_list_vars += PRODUCT_AFDO_PROFILES
+
 .KATI_READONLY := _product_single_value_vars _product_list_vars
 _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
 
diff --git a/core/product_config.mk b/core/product_config.mk
index 7055a1e..3f9eb24 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -74,7 +74,7 @@
 ###########################################################
 
 define find-copy-subdir-files
-$(sort $(shell find $(2) -name "$(1)" -type f | $(SED_EXTENDED) "s:($(2)/?(.*)):\\1\\:$(3)/\\2:" | sed "s://:/:g"))
+$(shell find $(2) -name "$(1)" -type f | $(SED_EXTENDED) "s:($(2)/?(.*)):\\1\\:$(3)/\\2:" | sed "s://:/:g" | sort)
 endef
 
 #
@@ -144,7 +144,6 @@
 #
 include $(BUILD_SYSTEM)/node_fns.mk
 include $(BUILD_SYSTEM)/product.mk
-include $(BUILD_SYSTEM)/device.mk
 
 # Read all product definitions.
 #
@@ -224,7 +223,7 @@
 endif
 
 ifeq (,$(current_product_makefile))
-  $(error Can not locate config makefile for product "$(TARGET_PRODUCT)")
+  $(error Cannot locate config makefile for product "$(TARGET_PRODUCT)")
 endif
 
 ifneq (,$(filter $(TARGET_PRODUCT),$(products_using_starlark_config)))
@@ -237,14 +236,22 @@
   $(shell mkdir -p $(OUT_DIR)/rbc)
   $(call dump-variables-rbc, $(OUT_DIR)/rbc/make_vars_pre_product_config.mk)
 
-  $(shell build/soong/scripts/update_out \
-    $(OUT_DIR)/rbc/rbc_product_config_results.mk \
-    build/soong/scripts/rbc-run \
-    $(current_product_makefile) \
-    $(OUT_DIR)/rbc/make_vars_pre_product_config.mk)
+  $(shell $(OUT_DIR)/mk2rbc \
+    --mode=write -r --outdir $(OUT_DIR)/rbc \
+    --launcher=$(OUT_DIR)/rbc/launcher.rbc \
+    --input_variables=$(OUT_DIR)/rbc/make_vars_pre_product_config.mk \
+    --makefile_list=$(OUT_DIR)/.module_paths/configuration.list \
+    $(current_product_makefile))
   ifneq ($(.SHELLSTATUS),0)
     $(error product configuration converter failed: $(.SHELLSTATUS))
   endif
+
+  $(shell build/soong/scripts/update_out $(OUT_DIR)/rbc/rbc_product_config_results.mk \
+    $(OUT_DIR)/rbcrun --mode=rbc $(OUT_DIR)/rbc/launcher.rbc)
+  ifneq ($(.SHELLSTATUS),0)
+    $(error product configuration runner failed: $(.SHELLSTATUS))
+  endif
+
   include $(OUT_DIR)/rbc/rbc_product_config_results.mk
 endif
 
@@ -280,6 +287,15 @@
 $(foreach include_tag,$(PRODUCT_INCLUDE_TAGS), \
 	$(if $(filter $(include_tag),$(BLUEPRINT_INCLUDE_TAGS_ALLOWLIST)),,\
 	$(call pretty-error, $(include_tag) is not in BLUEPRINT_INCLUDE_TAGS_ALLOWLIST: $(BLUEPRINT_INCLUDE_TAGS_ALLOWLIST))))
+# Create default PRODUCT_INCLUDE_TAGS
+ifeq (, $(PRODUCT_INCLUDE_TAGS))
+# Soong analysis is global: even though a module might not be relevant to a specific product (e.g. build_tools for aosp_arm),
+# we still analyse it.
+# This means that in setups where we have two prebuilts of module_sdk, we need a "default" to use in analysis
+# This should be a no-op in aosp and internal since no Android.bp file contains blueprint_package_includes
+PRODUCT_INCLUDE_TAGS += com.android.mainline # Use the big android one by default
+endif
+
 #############################################################################
 
 # Quick check and assign default values
@@ -518,7 +534,8 @@
     PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE := $(OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE)
   endif
 else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
-  # No shipping level defined
+  # No shipping level defined. Enforce the product interface by default.
+  PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE := true
 else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),29),true)
   # Enforce product interface if PRODUCT_SHIPPING_API_LEVEL is greater than 29.
   PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE := true
@@ -533,7 +550,8 @@
 ifneq ($(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE),)
   PRODUCT_USE_PRODUCT_VNDK := $(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE)
 else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
-  # No shipping level defined
+  # No shipping level defined. Use product VNDK by default.
+  PRODUCT_USE_PRODUCT_VNDK := true
 else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),29),true)
   # Enforce product interface for VNDK if PRODUCT_SHIPPING_API_LEVEL is greater
   # than 29.
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 97c1d00..921f068 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -54,25 +54,16 @@
     if value == None:
         return
     if type(value) == "list":
-        if _options.rearrange:
-            value = __printvars_rearrange_list(value)
-        if _options.format == "pretty":
-            print(attr, "=", repr(value))
-        elif _options.format == "make":
-            value = list(value)
-            for i, x in enumerate(value):
-                if type(x) == "tuple" and len(x) == 1:
-                    value[i] = "@inherit:" + x[0] + ".mk"
-                elif type(x) != "string":
-                    fail("Wasn't a list of strings:", attr, " value:", value)
-            print(attr, ":=", " ".join(value))
-    elif _options.format == "pretty":
-        print(attr, "=", repr(value))
-    elif _options.format == "make":
+        value = list(value)
+        for i, x in enumerate(value):
+            if type(x) == "tuple" and len(x) == 1:
+                value[i] = "@inherit:" + x[0] + ".mk"
+            elif type(x) != "string":
+                fail("Wasn't a list of strings:", attr, " value:", value)
+        print(attr, ":=", " ".join(value))
+    else:
         # Trim all spacing to a single space
         print(attr, ":=", _mkstrip(value))
-    else:
-        fail("bad output format", _options.format)
 
 def _printvars(state):
     """Prints configuration and global variables."""
@@ -83,8 +74,7 @@
             for nsname, nsvars in sorted(val.items()):
                 # Define SOONG_CONFIG_<ns> for Make, othewise
                 # it cannot be added to .KATI_READONLY list
-                if _options.format == "make":
-                    print("SOONG_CONFIG_" + nsname, ":=", " ".join(nsvars.keys()))
+                print("SOONG_CONFIG_" + nsname, ":=", " ".join(nsvars.keys()))
                 for var, val in sorted(nsvars.items()):
                     if val:
                         __print_attr("SOONG_CONFIG_%s_%s" % (nsname, var), val)
@@ -105,11 +95,6 @@
         elif attr not in globals_base or globals_base[attr] != val:
             __print_attr(attr, val)
 
-def __printvars_rearrange_list(value_list):
-    """Rearrange value list: return only distinct elements, maybe sorted."""
-    seen = {item: 0 for item in value_list}
-    return sorted(seen.keys()) if _options.rearrange == "sort" else seen.keys()
-
 def __sort_pcm_names(pcm_names):
     # We have to add an extension back onto the pcm names when sorting,
     # or else the sort order could be wrong when one is a prefix of another.
@@ -394,7 +379,7 @@
 def _soong_config_set(g, nsname, var, value):
     """Assigns the value to the variable in the namespace."""
     _soong_config_namespace(g, nsname)
-    g[_soong_config_namespaces_key][nsname][var]=value
+    g[_soong_config_namespaces_key][nsname][var]=_mkstrip(value)
 
 def _soong_config_append(g, nsname, var, value):
     """Appends to the value of the variable in the namespace."""
@@ -402,9 +387,9 @@
     ns = g[_soong_config_namespaces_key][nsname]
     oldv = ns.get(var)
     if oldv == None:
-        ns[var] = value
+        ns[var] = _mkstrip(value)
     else:
-        ns[var] += " " + value
+        ns[var] += " " + _mkstrip(value)
 
 
 def _soong_config_get(g, nsname, var):
@@ -691,16 +676,8 @@
     rblf_log(file, "warning", message, sep = ':')
 
 def _mk2rbc_error(loc, message):
-    """Prints a message about conversion error and stops.
-
-    If RBC_MK2RBC_CONTINUE environment variable is set,
-    the execution will continue after the message is printed.
-    """
-    if _options.mk2rbc_continue:
-        rblf_log(loc, message, sep = ':')
-    else:
-        _mkerror(loc, message)
-
+    """Prints a message about conversion error and stops."""
+    _mkerror(loc, message)
 
 def _mkinfo(file, message = ""):
     """Prints info."""
@@ -873,39 +850,12 @@
             # Cause the variable to appear set like the make version does
             g[v] = ""
 
-
-def __get_options():
-    """Returns struct containing runtime global settings."""
-    settings = dict(
-        format = "pretty",
-        rearrange = "",
-        trace_modules = False,
-        trace_variables = [],
-        mk2rbc_continue = False,
-    )
-    for x in getattr(rblf_cli, "RBC_OUT", "").split(","):
-        if x == "sort" or x == "unique":
-            if settings["rearrange"]:
-                fail("RBC_OUT: either sort or unique is allowed (and sort implies unique)")
-            settings["rearrange"] = x
-        elif x == "pretty" or x == "make":
-            settings["format"] = x
-        elif x == "global":
-            # TODO: Remove this, kept for backwards compatibility
-            pass
-        elif x != "":
-            fail("RBC_OUT: got %s, should be one of: [pretty|make] [sort|unique]" % x)
-    for x in getattr(rblf_cli, "RBC_DEBUG", "").split(","):
-        if x == "!trace":
-            settings["trace_modules"] = True
-        elif x != "":
-            settings["trace_variables"].append(x)
-    if getattr(rblf_cli, "RBC_MK2RBC_CONTINUE", ""):
-        settings["mk2rbc_continue"] = True
-    return struct(**settings)
-
 # Settings used during debugging.
-_options = __get_options()
+_options = struct(
+    trace_modules = False,
+    trace_variables = [],
+)
+
 rblf = struct(
     soong_config_namespace = _soong_config_namespace,
     soong_config_append = _soong_config_append,
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index 7e7b270..b59527a 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -41,6 +41,11 @@
     java.lang.Object readResolve();
 }
 
+# Keep all Javascript API methods
+-keepclassmembers class * {
+    @android.webkit.JavascriptInterface <methods>;
+}
+
 # Keep Throwable's constructor that takes a String argument.
 -keepclassmembers class * extends java.lang.Throwable {
   <init>(java.lang.String);
diff --git a/core/python_binary_host_mobly_test_config_template.xml b/core/python_binary_host_mobly_test_config_template.xml
new file mode 100644
index 0000000..a6576cd
--- /dev/null
+++ b/core/python_binary_host_mobly_test_config_template.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2023 The Android Open Source Project
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+          http://www.apache.org/licenses/LICENSE-2.0
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Config for {MODULE} mobly test">
+    {EXTRA_CONFIGS}
+
+    <device name="device1"></device>
+    <device name="device2"></device>
+
+    <test class="com.android.tradefed.testtype.mobly.MoblyBinaryHostTest">
+      <!-- The mobly-par-file-name should match the module name -->
+      <option name="mobly-par-file-name" value="{MODULE}" />
+      <!-- Timeout limit in milliseconds for all test cases of the python binary -->
+      <option name="mobly-test-timeout" value="300000" />
+    </test>
+</configuration>
diff --git a/core/release_config.bzl b/core/release_config.bzl
new file mode 100644
index 0000000..1346508
--- /dev/null
+++ b/core/release_config.bzl
@@ -0,0 +1,120 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Partitions that get build system flag summaries
+_flag_partitions = [
+    "product",
+    "system",
+    "system_ext",
+    "vendor",
+]
+
+ALL = ["all"]
+PRODUCT = ["product"]
+SYSTEM = ["system"]
+SYSTEM_EXT = ["system_ext"]
+VENDOR = ["vendor"]
+
+_valid_types = ["NoneType", "bool", "list", "string", "int"]
+
+def flag(name, partitions, default):
+    "Declare a flag."
+    if not partitions:
+        fail("At least 1 partition is required")
+    if not name.startswith("RELEASE_"):
+        fail("Release flag names must start with RELEASE_")
+    if " " in name or "\t" in name or "\n" in name:
+        fail("Flag names must not contain whitespace: \"" + name + "\"")
+    for partition in partitions:
+        if partition == "all":
+            if len(partitions) > 1:
+                fail("\"all\" can't be combined with other partitions: " + str(partitions))
+        elif partition not in _flag_partitions:
+            fail("Invalid partition: " + partition + ", allowed partitions: " +
+                 str(_flag_partitions))
+    if type(default) not in _valid_types:
+        fail("Invalid type of default for flag \"" + name + "\" (" + type(default) + ")")
+    return {
+        "name": name,
+        "partitions": partitions,
+        "default": default,
+    }
+
+def value(name, value):
+    "Define the flag value for a particular configuration."
+    return {
+        "name": name,
+        "value": value,
+    }
+
+def _format_value(val):
+    "Format the starlark type correctly for make"
+    if type(val) == "NoneType":
+        return ""
+    elif type(val) == "bool":
+        return "true" if val else ""
+    else:
+        return val
+
+def release_config(all_flags, all_values):
+    "Return the make variables that should be set for this release config."
+
+    # Validate flags
+    flag_names = []
+    for flag in all_flags:
+        if flag["name"] in flag_names:
+            fail(flag["declared_in"] + ": Duplicate declaration of flag " + flag["name"])
+        flag_names.append(flag["name"])
+
+    # Record which flags go on which partition
+    partitions = {}
+    for flag in all_flags:
+        for partition in flag["partitions"]:
+            if partition == "all":
+                for partition in _flag_partitions:
+                    partitions.setdefault(partition, []).append(flag["name"])
+            else:
+                partitions.setdefault(partition, []).append(flag["name"])
+
+    # Validate values
+    values = {}
+    for value in all_values:
+        if value["name"] not in flag_names:
+            fail(value["set_in"] + ": Value set for undeclared build flag: " + value["name"])
+        values[value["name"]] = value
+
+    # Collect values
+    result = {
+        "_ALL_RELEASE_FLAGS": sorted(flag_names),
+    }
+    for partition, names in partitions.items():
+        result["_ALL_RELEASE_FLAGS.PARTITIONS." + partition] = names
+    for flag in all_flags:
+        if flag["name"] in values:
+            val = values[flag["name"]]["value"]
+            set_in = values[flag["name"]]["set_in"]
+            if type(val) not in _valid_types:
+                fail("Invalid type of value for flag \"" + flag["name"] + "\" (" + type(val) + ")")
+        else:
+            val = flag["default"]
+            set_in = flag["declared_in"]
+        val = _format_value(val)
+        result[flag["name"]] = val
+        result["_ALL_RELEASE_FLAGS." + flag["name"] + ".PARTITIONS"] = flag["partitions"]
+        result["_ALL_RELEASE_FLAGS." + flag["name"] + ".DEFAULT"] = _format_value(flag["default"])
+        result["_ALL_RELEASE_FLAGS." + flag["name"] + ".VALUE"] = val
+        result["_ALL_RELEASE_FLAGS." + flag["name"] + ".DECLARED_IN"] = flag["declared_in"]
+        result["_ALL_RELEASE_FLAGS." + flag["name"] + ".SET_IN"] = set_in
+
+    return result
diff --git a/core/release_config.mk b/core/release_config.mk
new file mode 100644
index 0000000..3cd8b41
--- /dev/null
+++ b/core/release_config.mk
@@ -0,0 +1,153 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# -----------------------------------------------------------------
+# Choose the flag files
+# -----------------------------------------------------------------
+# Do this first, because we're going to unset TARGET_RELEASE before
+# including any other makefiles, so they don't start making conditionals based on it.
+# This logic is in make because starlark doesn't understand optional
+# vendor files.
+
+# If this is a google source tree, restrict it to only the one file
+# which has OWNERS control.  If it isn't, let others define their own.
+# TODO: Remove wildcard for build/release one when all branch manifests
+# have updated.
+config_map_files := $(wildcard build/release/release_config_map.mk) \
+    $(if $(wildcard vendor/google/release/release_config_map.mk), \
+        vendor/google/release/release_config_map.mk, \
+        $(sort \
+            $(wildcard device/*/release/release_config_map.mk) \
+            $(wildcard device/*/*/release/release_config_map.mk) \
+            $(wildcard vendor/*/release/release_config_map.mk) \
+            $(wildcard vendor/*/*/release/release_config_map.mk) \
+        ) \
+    )
+
+# $1 config name
+# $2 release config files
+define declare-release-config
+    $(eval # No duplicates)
+    $(if $(filter $(_all_release_configs), $(strip $(1))), \
+        $(error declare-release-config: config $(strip $(1)) declared in: $(_included) Previously declared here: $(_all_release_configs.$(strip $(1)).DECLARED_IN)) \
+    )
+    $(eval # Must have release config files)
+    $(if $(strip $(2)),,  \
+        $(error declare-release-config: config $(strip $(1)) must have release config files) \
+    )
+    $(eval _all_release_configs := $(sort $(_all_release_configs) $(strip $(1))))
+    $(eval _all_release_configs.$(strip $(1)).DECLARED_IN := $(_included))
+    $(eval _all_release_configs.$(strip $(1)).FILES := $(strip $(2)))
+endef
+
+# Include the config map files
+$(foreach f, $(config_map_files), \
+    $(eval _included := $(f)) \
+    $(eval include $(f)) \
+)
+
+# If TARGET_RELEASE is set, fail if there is no matching release config
+# If it isn't set, no release config files will be included and all flags
+# will get their default values.
+ifneq ($(TARGET_RELEASE),)
+ifeq ($(filter $(_all_release_configs), $(TARGET_RELEASE)),)
+    $(error No release config found for TARGET_RELEASE: $(TARGET_RELEASE). Available releases are: $(_all_release_configs))
+else
+    # Choose flag files
+    # Don't sort this, use it in the order they gave us.
+    flag_value_files := $(_all_release_configs.$(TARGET_RELEASE).FILES)
+endif
+else
+# Useful for finding scripts, etc., that aren't passing or setting TARGET_RELEASE
+ifneq ($(FAIL_IF_NO_RELEASE_CONFIG),)
+    $(error FAIL_IF_NO_RELEASE_CONFIG was set and TARGET_RELEASE was not)
+endif
+flag_value_files :=
+endif
+
+# Unset variables so they can't use them
+define declare-release-config
+$(error declare-release-config can only be called from inside release_config_map.mk files)
+endef
+
+# TODO: Remove this check after enough people have sourced lunch that we don't
+# need to worry about it trying to run get_build_vars TARGET_RELEASE. Maybe after ~9/2023
+ifneq ($(CALLED_FROM_SETUP),true)
+define TARGET_RELEASE
+$(error TARGET_RELEASE may not be accessed directly. Use individual flags.)
+endef
+else
+TARGET_RELEASE:=
+endif
+.KATI_READONLY := TARGET_RELEASE
+
+
+$(foreach config, $(_all_release_configs), \
+    $(eval _all_release_configs.$(config).DECLARED_IN:= ) \
+    $(eval _all_release_configs.$(config).FILES:= ) \
+)
+_all_release_configs:=
+config_map_files:=
+
+
+# -----------------------------------------------------------------
+# Flag declarations and values
+# -----------------------------------------------------------------
+# This part is in starlark.  We generate a root starlark file that loads
+# all of the flags declaration files that we found, and the flag_value_files
+# that we chose from the config map above.  Then we run that, and load the
+# results of that into the make environment.
+
+# If this is a google source tree, restrict it to only the one file
+# which has OWNERS control.  If it isn't, let others define their own.
+# TODO: Remove wildcard for build/release one when all branch manifests
+# have updated.
+flag_declaration_files := $(wildcard build/release/build_flags.bzl) \
+    $(if $(wildcard vendor/google/release/build_flags.bzl), \
+        vendor/google/release/build_flags.bzl, \
+        $(sort \
+            $(wildcard device/*/release/build_flags.bzl) \
+            $(wildcard device/*/*/release/build_flags.bzl) \
+            $(wildcard vendor/*/release/build_flags.bzl) \
+            $(wildcard vendor/*/*/release/build_flags.bzl) \
+        ) \
+    )
+
+
+# Because starlark can't find files with $(wildcard), write an entrypoint starlark script that
+# contains the result of the above wildcards for the starlark code to use.
+filename_to_starlark=$(subst /,_,$(subst .,_,$(1)))
+_c:=load("//build/make/core/release_config.bzl", "release_config")
+_c+=$(newline)def add(d, k, v):
+_c+=$(newline)$(space)d = dict(d)
+_c+=$(newline)$(space)d[k] = v
+_c+=$(newline)$(space)return d
+_c+=$(foreach f,$(flag_declaration_files),$(newline)load("$(f)", flags_$(call filename_to_starlark,$(f)) = "flags"))
+_c+=$(newline)all_flags = [] $(foreach f,$(flag_declaration_files),+ [add(x, "declared_in", "$(f)") for x in flags_$(call filename_to_starlark,$(f))])
+_c+=$(foreach f,$(flag_value_files),$(newline)load("//$(f)", values_$(call filename_to_starlark,$(f)) = "values"))
+_c+=$(newline)all_values = [] $(foreach f,$(flag_value_files),+ [add(x, "set_in", "$(f)") for x in values_$(call filename_to_starlark,$(f))])
+_c+=$(newline)variables_to_export_to_make = release_config(all_flags, all_values)
+$(file >$(OUT_DIR)/release_config_entrypoint.bzl,$(_c))
+_c:=
+filename_to_starlark:=
+
+# Exclude the entrypoint file as a dependency (by passing it as the 2nd argument) so that we don't
+# rerun kati every build. Kati will replay the $(file) command that generates it every build,
+# updating its timestamp.
+#
+# We also need to pass --allow_external_entrypoint to rbcrun in case the OUT_DIR is set to something
+# outside of the source tree.
+$(call run-starlark,$(OUT_DIR)/release_config_entrypoint.bzl,$(OUT_DIR)/release_config_entrypoint.bzl,--allow_external_entrypoint)
+
diff --git a/core/sbom.mk b/core/sbom.mk
new file mode 100644
index 0000000..e23bbc1
--- /dev/null
+++ b/core/sbom.mk
@@ -0,0 +1,11 @@
+# For SBOM generation
+# This is included by base_rules.mk and does not need to be included in other .mk files
+# unless a .mk file changes its installed file after including base_rules.mk.
+
+ifdef my_register_name
+  ifneq (, $(strip $(ALL_MODULES.$(my_register_name).INSTALLED)))
+    $(foreach installed_file,$(ALL_MODULES.$(my_register_name).INSTALLED),\
+      $(eval ALL_INSTALLED_FILES.$(installed_file) := $(my_register_name))\
+    )
+  endif
+endif
\ No newline at end of file
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 786a755..ccc5449 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -162,19 +162,21 @@
 # embedded JNI will already have been handled by soong
 my_embed_jni :=
 my_prebuilt_jni_libs :=
-ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
-  my_2nd_arch_prefix :=
-  LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH))
-  partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_ARCH))
-  include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
-endif
-ifdef TARGET_2ND_ARCH
-  ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH)
-    my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
-    LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH))
-    partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_2ND_ARCH))
+ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
+  ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
+    my_2nd_arch_prefix :=
+    LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH))
+    partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_ARCH))
     include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
   endif
+  ifdef TARGET_2ND_ARCH
+    ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH)
+      my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
+      LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH))
+      partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_2ND_ARCH))
+      include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
+    endif
+  endif
 endif
 LOCAL_SHARED_JNI_LIBRARIES :=
 my_embed_jni :=
@@ -237,26 +239,28 @@
 include $(BUILD_SYSTEM)/link_type.mk
 endif # !LOCAL_IS_HOST_MODULE
 
-ifdef LOCAL_SOONG_DEVICE_RRO_DIRS
-  $(call append_enforce_rro_sources, \
-      $(my_register_name), \
-      false, \
-      $(LOCAL_FULL_MANIFEST_FILE), \
-      $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
-      $(LOCAL_SOONG_DEVICE_RRO_DIRS), \
-      vendor \
-  )
-endif
+ifeq (,$(filter tests,$(LOCAL_MODULE_TAGS)))
+  ifdef LOCAL_SOONG_DEVICE_RRO_DIRS
+    $(call append_enforce_rro_sources, \
+        $(my_register_name), \
+        false, \
+        $(LOCAL_FULL_MANIFEST_FILE), \
+        $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
+        $(LOCAL_SOONG_DEVICE_RRO_DIRS), \
+        vendor \
+    )
+  endif
 
-ifdef LOCAL_SOONG_PRODUCT_RRO_DIRS
-  $(call append_enforce_rro_sources, \
-      $(my_register_name), \
-      false, \
-      $(LOCAL_FULL_MANIFEST_FILE), \
-      $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
-      $(LOCAL_SOONG_PRODUCT_RRO_DIRS), \
-      product \
-  )
+  ifdef LOCAL_SOONG_PRODUCT_RRO_DIRS
+    $(call append_enforce_rro_sources, \
+        $(my_register_name), \
+        false, \
+        $(LOCAL_FULL_MANIFEST_FILE), \
+        $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
+        $(LOCAL_SOONG_PRODUCT_RRO_DIRS), \
+        product \
+    )
+  endif
 endif
 
 ifdef LOCAL_PREBUILT_COVERAGE_ARCHIVE
@@ -267,3 +271,8 @@
 endif
 
 SOONG_ALREADY_CONV += $(LOCAL_MODULE)
+
+###########################################################
+## SBOM generation
+###########################################################
+include $(BUILD_SBOM_GEN)
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 091fa34..6383393 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -1,17 +1,20 @@
 SOONG_MAKEVARS_MK := $(SOONG_OUT_DIR)/make_vars-$(TARGET_PRODUCT).mk
-SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.variables
+SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.$(TARGET_PRODUCT).variables
 SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT).mk
 
-BINDER32BIT :=
-ifneq ($(TARGET_USES_64_BIT_BINDER),true)
-ifneq ($(TARGET_IS_64_BIT),true)
-BINDER32BIT := true
-endif
-endif
-
 include $(BUILD_SYSTEM)/art_config.mk
 include $(BUILD_SYSTEM)/dex_preopt_config.mk
 
+ifndef AFDO_PROFILES
+# Set AFDO_PROFILES
+-include vendor/google_data/pgo_profile/sampling/afdo_profiles.mk
+else
+$(error AFDO_PROFILES can only be set from soong_config.mk. For product-specific fdo_profiles, please use PRODUCT_AFDO_PROFILES)
+endif
+
+# PRODUCT_AFDO_PROFILES takes precedence over product-agnostic profiles in AFDO_PROFILES
+ALL_AFDO_PROFILES := $(PRODUCT_AFDO_PROFILES) $(AFDO_PROFILES)
+
 ifeq ($(WRITE_SOONG_VARIABLES),true)
 
 # Create soong.variables with copies of makefile settings.  Runs every build,
@@ -31,6 +34,7 @@
 $(call add_json_val,  Platform_sdk_extension_version,    $(PLATFORM_SDK_EXTENSION_VERSION))
 $(call add_json_val,  Platform_base_sdk_extension_version, $(PLATFORM_BASE_SDK_EXTENSION_VERSION))
 $(call add_json_csv,  Platform_version_active_codenames, $(PLATFORM_VERSION_ALL_CODENAMES))
+$(call add_json_csv,  Platform_version_all_preview_codenames, $(PLATFORM_VERSION_ALL_PREVIEW_CODENAMES))
 $(call add_json_str,  Platform_security_patch,           $(PLATFORM_SECURITY_PATCH))
 $(call add_json_str,  Platform_preview_sdk_version,      $(PLATFORM_PREVIEW_SDK_VERSION))
 $(call add_json_str,  Platform_base_os,                  $(PLATFORM_BASE_OS))
@@ -109,6 +113,7 @@
 $(call add_json_list, CFIExcludePaths,                   $(CFI_EXCLUDE_PATHS) $(PRODUCT_CFI_EXCLUDE_PATHS))
 $(call add_json_list, CFIIncludePaths,                   $(CFI_INCLUDE_PATHS) $(PRODUCT_CFI_INCLUDE_PATHS))
 $(call add_json_list, IntegerOverflowExcludePaths,       $(INTEGER_OVERFLOW_EXCLUDE_PATHS) $(PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS))
+$(call add_json_list, HWASanIncludePaths,                $(HWASAN_INCLUDE_PATHS) $(PRODUCT_HWASAN_INCLUDE_PATHS))
 
 $(call add_json_list, MemtagHeapExcludePaths,            $(MEMTAG_HEAP_EXCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_EXCLUDE_PATHS))
 $(call add_json_list, MemtagHeapAsyncIncludePaths,       $(MEMTAG_HEAP_ASYNC_INCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS))
@@ -131,7 +136,6 @@
 $(call add_json_bool, SamplingPGO,                       $(filter true,$(SAMPLING_PGO)))
 
 $(call add_json_bool, ArtUseReadBarrier,                 $(call invert_bool,$(filter false,$(PRODUCT_ART_USE_READ_BARRIER))))
-$(call add_json_bool, Binder32bit,                       $(BINDER32BIT))
 $(call add_json_str,  BtConfigIncludeDir,                $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR))
 $(call add_json_list, DeviceKernelHeaders,               $(TARGET_DEVICE_KERNEL_HEADERS) $(TARGET_BOARD_KERNEL_HEADERS) $(TARGET_PRODUCT_KERNEL_HEADERS))
 $(call add_json_str,  DeviceVndkVersion,                 $(BOARD_VNDK_VERSION))
@@ -145,6 +149,7 @@
 $(call add_json_bool, Malloc_zero_contents,              $(call invert_bool,$(filter false,$(MALLOC_ZERO_CONTENTS))))
 $(call add_json_bool, Malloc_pattern_fill_contents,      $(MALLOC_PATTERN_FILL_CONTENTS))
 $(call add_json_str,  Override_rs_driver,                $(OVERRIDE_RS_DRIVER))
+$(call add_json_str,  DeviceMaxPageSizeSupported,        $(TARGET_MAX_PAGE_SIZE_SUPPORTED))
 
 $(call add_json_bool, UncompressPrivAppDex,              $(call invert_bool,$(filter true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))))
 $(call add_json_list, ModulesLoadedByPrivilegedModules,  $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
@@ -246,8 +251,6 @@
 
 $(call add_json_list, TargetFSConfigGen,                 $(TARGET_FS_CONFIG_GEN))
 
-$(call add_json_list, MissingUsesLibraries,              $(INTERNAL_PLATFORM_MISSING_USES_LIBRARIES))
-
 $(call add_json_map, VendorVars)
 $(foreach namespace,$(sort $(SOONG_CONFIG_NAMESPACES)),\
   $(call add_json_map, $(namespace))\
@@ -280,23 +283,25 @@
 
 $(call add_json_str,  ShippingApiLevel, $(PRODUCT_SHIPPING_API_LEVEL))
 
+$(call add_json_list, BuildBrokenPluginValidation,        $(BUILD_BROKEN_PLUGIN_VALIDATION))
 $(call add_json_bool, BuildBrokenClangProperty,           $(filter true,$(BUILD_BROKEN_CLANG_PROPERTY)))
 $(call add_json_bool, BuildBrokenClangAsFlags,            $(filter true,$(BUILD_BROKEN_CLANG_ASFLAGS)))
 $(call add_json_bool, BuildBrokenClangCFlags,             $(filter true,$(BUILD_BROKEN_CLANG_CFLAGS)))
-$(call add_json_bool, BuildBrokenDepfile,                 $(filter true,$(BUILD_BROKEN_DEPFILE)))
+$(call add_json_bool, GenruleSandboxing,                  $(filter true,$(GENRULE_SANDBOXING)))
 $(call add_json_bool, BuildBrokenEnforceSyspropOwner,     $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
 $(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
+$(call add_json_bool, BuildBrokenUsesSoongPython2Modules, $(filter true,$(BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES)))
 $(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
 $(call add_json_list, BuildBrokenInputDirModules, $(BUILD_BROKEN_INPUT_DIR_MODULES))
 
+$(call add_json_list, BuildWarningBadOptionalUsesLibsAllowlist,    $(BUILD_WARNING_BAD_OPTIONAL_USES_LIBS_ALLOWLIST))
+
 $(call add_json_bool, BuildDebugfsRestrictionsEnabled, $(filter true,$(PRODUCT_SET_DEBUGFS_RESTRICTIONS)))
 
 $(call add_json_bool, RequiresInsecureExecmemForSwiftshader, $(filter true,$(PRODUCT_REQUIRES_INSECURE_EXECMEM_FOR_SWIFTSHADER)))
 
 $(call add_json_bool, SelinuxIgnoreNeverallows, $(filter true,$(SELINUX_IGNORE_NEVERALLOWS)))
 
-$(call add_json_bool, SepolicySplit, $(filter true,$(PRODUCT_SEPOLICY_SPLIT)))
-
 $(call add_json_list, SepolicyFreezeTestExtraDirs,         $(SEPOLICY_FREEZE_TEST_EXTRA_DIRS))
 $(call add_json_list, SepolicyFreezeTestExtraPrebuiltDirs, $(SEPOLICY_FREEZE_TEST_EXTRA_PREBUILT_DIRS))
 
@@ -305,6 +310,16 @@
 $(call add_json_bool, IgnorePrefer32OnDevice, $(filter true,$(IGNORE_PREFER32_ON_DEVICE)))
 
 $(call add_json_list, IncludeTags,                $(PRODUCT_INCLUDE_TAGS))
+$(call add_json_list, SourceRootDirs,             $(PRODUCT_SOURCE_ROOT_DIRS))
+
+$(call add_json_list, AfdoProfiles,                $(ALL_AFDO_PROFILES))
+
+$(call add_json_str,  ProductManufacturer, $(PRODUCT_MANUFACTURER))
+$(call add_json_str,  ProductBrand,        $(PRODUCT_BRAND))
+$(call add_json_list, BuildVersionTags,    $(BUILD_VERSION_TAGS))
+
+$(call add_json_str, ReleaseVersion,    $(_RELEASE_VERSION))
+$(call add_json_list, ReleaseDeviceConfigValueSets,    $(RELEASE_DEVICE_CONFIG_VALUE_SETS))
 
 $(call json_end)
 
diff --git a/core/sysprop.mk b/core/sysprop.mk
index b7f0651..a2296a8 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -141,7 +141,7 @@
 	    fi;)
 	$(hide) echo "# end of file" >> $$@
 
-$(call declare-0p-target,$(2))
+$(call declare-1p-target,$(2))
 endef
 
 # -----------------------------------------------------------------
@@ -174,7 +174,7 @@
   ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
     BF_BUILD_NUMBER := $(BUILD_USERNAME)$$($(DATE_FROM_FILE) +%m%d%H%M)
   else
-    BF_BUILD_NUMBER := $(file <$(BUILD_NUMBER_FILE))
+    BF_BUILD_NUMBER := $(BUILD_NUMBER_FROM_FILE)
   endif
   BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_PRODUCT)/$(TARGET_DEVICE):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BF_BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
 endif
@@ -196,6 +196,9 @@
 endif
 
 BUILD_THUMBPRINT_FILE := $(PRODUCT_OUT)/build_thumbprint.txt
+ifeq ($(strip $(HAS_BUILD_NUMBER)),true)
+$(BUILD_THUMBPRINT_FILE): $(BUILD_NUMBER_FILE)
+endif
 ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_THUMBPRINT) >$(BUILD_THUMBPRINT_FILE) && grep " " $(BUILD_THUMBPRINT_FILE)))
   $(error BUILD_THUMBPRINT cannot contain spaces: "$(file <$(BUILD_THUMBPRINT_FILE))")
 endif
@@ -260,7 +263,11 @@
 endef
 
 gen_from_buildinfo_sh := $(call intermediates-dir-for,PACKAGING,system_build_prop)/buildinfo.prop
-$(gen_from_buildinfo_sh): $(INTERNAL_BUILD_ID_MAKEFILE) $(API_FINGERPRINT) | $(BUILD_DATETIME_FILE) $(BUILD_NUMBER_FILE)
+
+ifeq ($(strip $(HAS_BUILD_NUMBER)),true)
+$(gen_from_buildinfo_sh): $(BUILD_NUMBER_FILE)
+endif
+$(gen_from_buildinfo_sh): $(INTERNAL_BUILD_ID_MAKEFILE) $(API_FINGERPRINT) $(BUILD_HOSTNAME_FILE) | $(BUILD_DATETIME_FILE)
 	$(hide) TARGET_BUILD_TYPE="$(TARGET_BUILD_VARIANT)" \
 	        TARGET_BUILD_FLAVOR="$(TARGET_BUILD_FLAVOR)" \
 	        TARGET_DEVICE="$(TARGET_DEVICE)" \
@@ -271,7 +278,7 @@
 	        BUILD_DISPLAY_ID="$(BUILD_DISPLAY_ID)" \
 	        DATE="$(DATE_FROM_FILE)" \
 	        BUILD_USERNAME="$(BUILD_USERNAME)" \
-	        BUILD_HOSTNAME="$(BUILD_HOSTNAME)" \
+	        BUILD_HOSTNAME="$(BUILD_HOSTNAME_FROM_FILE)" \
 	        BUILD_NUMBER="$(BUILD_NUMBER_FROM_FILE)" \
 	        BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT="$(BOARD_USE_VBMETA_DIGTEST_IN_FINGERPRINT)" \
 	        PLATFORM_VERSION="$(PLATFORM_VERSION)" \
@@ -543,3 +550,19 @@
     $(empty)))
 
 $(eval $(call declare-1p-target,$(INSTALLED_RAMDISK_BUILD_PROP_TARGET)))
+
+ALL_INSTALLED_BUILD_PROP_FILES := \
+  $(INSTALLED_BUILD_PROP_TARGET) \
+  $(INSTALLED_VENDOR_BUILD_PROP_TARGET) \
+  $(INSTALLED_PRODUCT_BUILD_PROP_TARGET) \
+  $(INSTALLED_ODM_BUILD_PROP_TARGET) \
+  $(INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET) \
+  $(INSTALLED_ODM_DLKM_BUILD_PROP_TARGET) \
+  $(INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET) \
+  $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET) \
+  $(INSTALLED_RAMDISK_BUILD_PROP_TARGET)
+
+# $1 installed file path, e.g. out/target/product/vsoc_x86_64/system/build.prop
+define is-build-prop
+$(if $(findstring $1,$(ALL_INSTALLED_BUILD_PROP_FILES)),Y)
+endef
\ No newline at end of file
diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk
index 2af1ded..ff9eb09 100644
--- a/core/tasks/art-host-tests.mk
+++ b/core/tasks/art-host-tests.mk
@@ -24,25 +24,55 @@
     $(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
     $(_cmf_src)))
 
-$(art_host_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_art_host_tests)
+# Create an artifact to include a list of test config files in art-host-tests.
+art_host_tests_list_zip := $(PRODUCT_OUT)/art-host-tests_list.zip
+# Create an artifact to include all test config files in art-host-tests.
+art_host_tests_configs_zip := $(PRODUCT_OUT)/art-host-tests_configs.zip
+# Create an artifact to include all shared library files in art-host-tests.
+art_host_tests_host_shared_libs_zip := $(PRODUCT_OUT)/art-host-tests_host-shared-libs.zip
 
+$(art_host_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_art_host_tests)
+$(art_host_tests_zip) : PRIVATE_art_host_tests_list_zip := $(art_host_tests_list_zip)
+$(art_host_tests_zip) : PRIVATE_art_host_tests_configs_zip := $(art_host_tests_configs_zip)
+$(art_host_tests_zip) : PRIVATE_art_host_tests_host_shared_libs_zip := $(art_host_tests_host_shared_libs_zip)
+$(art_host_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(art_host_tests_list_zip) $(art_host_tests_configs_zip) $(art_host_tests_host_shared_libs_zip)
+$(art_host_tests_zip) : PRIVATE_INTERMEDIATES_DIR := $(intermediates_dir)
 $(art_host_tests_zip) : $(COMPATIBILITY.art-host-tests.FILES) $(my_host_shared_lib_for_art_host_tests) $(SOONG_ZIP)
-	echo $(sort $(COMPATIBILITY.art-host-tests.FILES)) | tr " " "\n" > $@.list
-	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
-	$(hide) touch $@-host-libs.list
+	rm -rf $(PRIVATE_INTERMEDIATES_DIR)
+	rm -f $@ $(PRIVATE_art_host_tests_list_zip)
+	mkdir -p $(PRIVATE_INTERMEDIATES_DIR)
+	echo $(sort $(COMPATIBILITY.art-host-tests.FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
+	grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true
+	$(hide) touch $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
-	  echo $$shared_lib >> $@-host-libs.list; \
+	  echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list; \
 	done
-	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
-	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list \
-	  -P target -C $(PRODUCT_OUT) -l $@-target.list \
-	  -P host/testcases -C $(HOST_OUT) -l $@-host-libs.list
-	rm -f $@.list $@-host.list $@-target.list $@-host-libs.list
+	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \
+	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \
+	  -P host/testcases -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list > $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list || true
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_configs_zip) \
+	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \
+	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list
+	grep $(HOST_OUT) $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list > $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list || true
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_host_shared_libs_zip) \
+	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_list_zip) -C $(PRIVATE_INTERMEDIATES_DIR) -f $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
 
 art-host-tests: $(art_host_tests_zip)
-$(call dist-for-goals, art-host-tests, $(art_host_tests_zip))
+$(call dist-for-goals, art-host-tests, $(art_host_tests_zip) $(art_host_tests_list_zip) $(art_host_tests_configs_zip) $(art_host_tests_host_shared_libs_zip))
 
 $(call declare-1p-container,$(art_host_tests_zip),)
 $(call declare-container-license-deps,$(art_host_tests_zip),$(COMPATIBILITY.art-host-tests.FILES) $(my_host_shared_lib_for_art_host_tests),$(PRODUCT_OUT)/:/)
 
 tests: art-host-tests
+
+intermediates_dir :=
+art_host_tests_zip :=
+art_host_tests_list_zip :=
+art_host_tests_configs_zip :=
+art_host_tests_host_shared_libs_zip :=
diff --git a/core/tasks/collect_gpl_sources.mk b/core/tasks/collect_gpl_sources.mk
deleted file mode 100644
index 9e9ab8e..0000000
--- a/core/tasks/collect_gpl_sources.mk
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# The rule below doesn't have dependenices on the files that it copies,
-# so manually generate into a PACKAGING intermediate dir, which is wiped
-# in installclean between incremental builds on build servers.
-gpl_source_tgz := $(call intermediates-dir-for,PACKAGING,gpl_source)/gpl_source.tgz
-
-ALL_GPL_MODULE_LICENSE_FILES := $(sort $(ALL_GPL_MODULE_LICENSE_FILES))
-
-# FORCE since we can't know whether any of the sources changed
-$(gpl_source_tgz): PRIVATE_PATHS := $(sort $(patsubst %/, %, $(dir $(ALL_GPL_MODULE_LICENSE_FILES))))
-$(gpl_source_tgz) : $(ALL_GPL_MODULE_LICENSE_FILES)
-	@echo Package GPL sources: $@
-	$(hide) tar cfz $@ --exclude ".git*" $(PRIVATE_PATHS)
-
-# Dist the tgz only if we are doing a full build
-$(call dist-for-goals,droidcore-unbundled,$(gpl_source_tgz))
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
index e83d408..66ba8f1 100644
--- a/core/tasks/module-info.mk
+++ b/core/tasks/module-info.mk
@@ -18,6 +18,7 @@
 			'"test_config": [$(foreach w,$(strip $(ALL_MODULES.$(m).TEST_CONFIG) $(ALL_MODULES.$(m).EXTRA_TEST_CONFIGS)),"$(w)", )], ' \
 			'"dependencies": [$(foreach w,$(sort $(ALL_DEPS.$(m).ALL_DEPS)),"$(w)", )], ' \
 			'"shared_libs": [$(foreach w,$(sort $(ALL_MODULES.$(m).SHARED_LIBS)),"$(w)", )], ' \
+			'"static_libs": [$(foreach w,$(sort $(ALL_MODULES.$(m).STATIC_LIBS)),"$(w)", )], ' \
 			'"system_shared_libs": [$(foreach w,$(sort $(ALL_MODULES.$(m).SYSTEM_SHARED_LIBS)),"$(w)", )], ' \
 			'"srcs": [$(foreach w,$(sort $(ALL_MODULES.$(m).SRCS)),"$(w)", )], ' \
 			'"srcjars": [$(foreach w,$(sort $(ALL_MODULES.$(m).SRCJARS)),"$(w)", )], ' \
diff --git a/core/tasks/sdk-addon.mk b/core/tasks/sdk-addon.mk
index 5097f12..7acac72 100644
--- a/core/tasks/sdk-addon.mk
+++ b/core/tasks/sdk-addon.mk
@@ -19,12 +19,13 @@
 addon_name := $(PRODUCT_SDK_ADDON_NAME)
 ifneq ($(addon_name),)
 
-addon_dir_leaf  := $(addon_name)-$(FILE_NAME_TAG)-$(INTERNAL_SDK_HOST_OS_NAME)
-addon_dir_img   := $(addon_dir_leaf)-img
-intermediates   := $(HOST_OUT_INTERMEDIATES)/SDK_ADDON/$(addon_name)_intermediates
-full_target     := $(HOST_OUT_SDK_ADDON)/$(addon_dir_leaf).zip
-full_target_img := $(HOST_OUT_SDK_ADDON)/$(addon_dir_img).zip
-staging         := $(intermediates)
+addon_dir_leaf        := $(addon_name)-$(INTERNAL_SDK_HOST_OS_NAME)
+addon_dir_img         := $(addon_dir_leaf)-img
+intermediates         := $(HOST_OUT_INTERMEDIATES)/SDK_ADDON/$(addon_name)_intermediates
+full_target           := $(HOST_OUT_SDK_ADDON)/$(addon_dir_leaf).zip
+full_target_dist_name := $(addon_name)-FILE_NAME_TAG_PLACEHOLDER-$(INTERNAL_SDK_HOST_OS_NAME)
+full_target_img       := $(HOST_OUT_SDK_ADDON)/$(addon_dir_img).zip
+staging               := $(intermediates)
 
 sdk_addon_deps :=
 files_to_copy :=
@@ -140,7 +141,7 @@
 else
 # When not building an sdk_repo, just dist the addon zip file
 # as-is.
-$(call dist-for-goals, sdk_addon, $(full_target))
+$(call dist-for-goals, sdk_addon, $(full_target):$(full_target_dist_name))
 endif
 
 else # addon_name
diff --git a/core/tasks/test_mapping.mk b/core/tasks/test_mapping.mk
index 0b0c93c..eb2a585 100644
--- a/core/tasks/test_mapping.mk
+++ b/core/tasks/test_mapping.mk
@@ -21,17 +21,17 @@
 intermediates := $(call intermediates-dir-for,PACKAGING,test_mapping)
 test_mappings_zip := $(intermediates)/test_mappings.zip
 test_mapping_list := $(OUT_DIR)/.module_paths/TEST_MAPPING.list
-test_mappings := $(file <$(test_mapping_list))
-$(test_mappings_zip) : PRIVATE_test_mappings := $(subst $(newline),\n,$(test_mappings))
 $(test_mappings_zip) : PRIVATE_all_disabled_presubmit_tests := $(ALL_DISABLED_PRESUBMIT_TESTS)
+$(test_mappings_zip) : PRIVATE_test_mapping_list := $(test_mapping_list)
 
-$(test_mappings_zip) : $(test_mappings) $(SOONG_ZIP)
+$(test_mappings_zip) : .KATI_DEPFILE := $(test_mappings_zip).d
+$(test_mappings_zip) : $(test_mapping_list) $(SOONG_ZIP)
 	@echo "Building artifact to include TEST_MAPPING files and tests to skip in presubmit check."
 	rm -rf $@ $(dir $@)/disabled-presubmit-tests
 	echo $(sort $(PRIVATE_all_disabled_presubmit_tests)) | tr " " "\n" > $(dir $@)/disabled-presubmit-tests
-	echo -e "$(PRIVATE_test_mappings)" > $@.list
-	$(SOONG_ZIP) -o $@ -C . -l $@.list -C $(dir $@) -f $(dir $@)/disabled-presubmit-tests
-	rm -f $@.list $(dir $@)/disabled-presubmit-tests
+	$(SOONG_ZIP) -o $@ -C . -l $(PRIVATE_test_mapping_list) -C $(dir $@) -f $(dir $@)/disabled-presubmit-tests
+	echo "$@ : " $$(cat $(PRIVATE_test_mapping_list)) > $@.d
+	rm -f $(dir $@)/disabled-presubmit-tests
 
 test_mapping : $(test_mappings_zip)
 
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index 2626120..ba97e8a 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -105,6 +105,9 @@
 else ifneq (,$(filter true, $(CUSTOM_IMAGE_AVB_HASH_ENABLE) $(CUSTOM_IMAGE_AVB_HASHTREE_ENABLE)))
   $(error Cannot set both CUSTOM_IMAGE_AVB_HASH_ENABLE and CUSTOM_IMAGE_AVB_HASHTREE_ENABLE to true)
 endif
+ifeq ($(strip $(HAS_BUILD_NUMBER)),true)
+$(my_built_custom_image): $(BUILD_NUMBER_FILE)
+endif
 $(my_built_custom_image): $(INTERNAL_USERIMAGES_DEPS) $(my_built_modules) $(my_image_copy_files) $(my_custom_image_modules_dep) \
   $(CUSTOM_IMAGE_DICT_FILE)
 	@echo "Build image $@"
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 9400890..dd2305e 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -30,8 +30,6 @@
 out_dir := $(HOST_OUT)/$(test_suite_name)/$(test_suite_subdir)
 test_artifacts := $(COMPATIBILITY.$(test_suite_name).FILES)
 test_tools := $(HOST_OUT_JAVA_LIBRARIES)/tradefed.jar \
-  $(HOST_OUT_JAVA_LIBRARIES)/tradefed-no-fwk.jar \
-  $(HOST_OUT_JAVA_LIBRARIES)/tradefed-test-framework.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/loganalysis.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/compatibility-tradefed.jar \
@@ -46,10 +44,16 @@
 
 # The JDK to package into the test suite zip file.  Always package the linux JDK.
 test_suite_jdk_dir := $(ANDROID_JAVA_HOME)/../linux-x86
+ifndef test_suite_jdk_files
+  # This file gets included many times, so make sure we only run the $(shell) once.
+  # Otherwise it will slow down every build due to all copies of it being rerun when kati
+  # checks the stamp file.
+  test_suite_jdk_files :=$= $(shell find $(test_suite_jdk_dir) -type f | sort)
+endif
 test_suite_jdk := $(call intermediates-dir-for,PACKAGING,$(test_suite_name)_jdk,HOST)/jdk.zip
 $(test_suite_jdk): PRIVATE_JDK_DIR := $(test_suite_jdk_dir)
 $(test_suite_jdk): PRIVATE_SUBDIR := $(test_suite_subdir)
-$(test_suite_jdk): $(shell find $(test_suite_jdk_dir) -type f | sort)
+$(test_suite_jdk): $(test_suite_jdk_files)
 $(test_suite_jdk): $(SOONG_ZIP)
 	$(SOONG_ZIP) -o $@ -P $(PRIVATE_SUBDIR)/jdk -C $(PRIVATE_JDK_DIR) -D $(PRIVATE_JDK_DIR) -sha256
 
@@ -114,6 +118,9 @@
 $(compatibility_zip): PRIVATE_JDK := $(test_suite_jdk)
 $(compatibility_zip): PRIVATE_tests_list := $(out_dir)-tests_list
 $(compatibility_zip): PRIVATE_tests_list_zip := $(compatibility_tests_list_zip)
+ifeq ($(strip $(HAS_BUILD_NUMBER)),true)
+$(compatibility_zip): $(BUILD_NUMBER_FILE)
+endif
 $(compatibility_zip): $(compatibility_zip_deps) | $(ADB) $(ACP)
 # Make dir structure
 	mkdir -p $(PRIVATE_OUT_DIR)/tools $(PRIVATE_OUT_DIR)/testcases
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index c41aec5..b15df28 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -50,12 +50,12 @@
   $(error done)
 endif
 
-my_missing_files = $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))
+my_missing_files = $(shell $(call echo-warning,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))$(shell $(call echo-warning,$(my_makefile),$(my_package_name): Some necessary modules may have been skipped by Soong. Check if PRODUCT_SOURCE_ROOT_DIRS is pruning necessary Android.bp files.))
 ifeq ($(ALLOW_MISSING_DEPENDENCIES),true)
   # Ignore unknown installed files on partial builds
   my_missing_files =
 else ifneq ($(my_modules_strict),false)
-  my_missing_files = $(shell $(call echo-error,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))$(eval my_missing_error := true)
+  my_missing_files = $(shell $(call echo-error,$(my_makefile),$(my_package_name): Unknown installed file for module '$(1)'))$(shell $(call echo-warning,$(my_makefile),$(my_package_name): Some necessary modules may have been skipped by Soong. Check if PRODUCT_SOURCE_ROOT_DIRS is pruning necessary Android.bp files.))$(eval my_missing_error := true)
 endif
 
 # Iterate over modules' built files and installed files;
diff --git a/core/tasks/with-license.mk b/core/tasks/with-license.mk
index d41e77a..5ca974a 100644
--- a/core/tasks/with-license.mk
+++ b/core/tasks/with-license.mk
@@ -20,7 +20,8 @@
 	name := $(name)_debug
 endif
 
-name := $(name)-flashable-$(FILE_NAME_TAG)-with-license
+dist_name := $(name)-flashable-FILE_NAME_TAG_PLACEHOLDER-with-license
+name := $(name)-flashable-with-license
 
 with_license_intermediates := \
 	$(call intermediates-dir-for,PACKAGING,with_license)
@@ -42,6 +43,7 @@
 $(call declare-container-deps,$(license_image_input_zip),$(BUILT_TARGET_FILES_PACKAGE))
 
 with_license_zip := $(PRODUCT_OUT)/$(name).sh
+dist_name := $(dist_name).sh
 $(with_license_zip): PRIVATE_NAME := $(name)
 $(with_license_zip): PRIVATE_INPUT_ZIP := $(license_image_input_zip)
 $(with_license_zip): PRIVATE_VENDOR_BLOBS_LICENSE := $(VENDOR_BLOBS_LICENSE)
@@ -51,7 +53,7 @@
 	$(HOST_OUT_EXECUTABLES)/generate-self-extracting-archive $@ \
 		$(PRIVATE_INPUT_ZIP) $(PRIVATE_NAME) $(PRIVATE_VENDOR_BLOBS_LICENSE)
 with-license : $(with_license_zip)
-$(call dist-for-goals, with-license, $(with_license_zip))
+$(call dist-for-goals, with-license, $(with_license_zip):$(dist_name))
 
 $(call declare-1p-container,$(with_license_zip),)
 $(call declare-container-license-deps,$(with_license_zip),$(license_image_input_zip),$(with_license_zip):)
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index a664b9d..4a42783 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -40,10 +40,9 @@
   include $(INTERNAL_BUILD_ID_MAKEFILE)
 endif
 
-DEFAULT_PLATFORM_VERSION := UP1A
-.KATI_READONLY := DEFAULT_PLATFORM_VERSION
+# Set release configuration. The default resides in build/release/build_flags.mk.
 MIN_PLATFORM_VERSION := UP1A
-MAX_PLATFORM_VERSION := UP1A
+MAX_PLATFORM_VERSION := VP1A
 
 # The last stable version name of the platform that was released.  During
 # development, this stays at that previous version, while the codename indicates
@@ -54,6 +53,7 @@
 # These are the current development codenames, if the build is not a final
 # release build.  If this is a final release build, it is simply "REL".
 PLATFORM_VERSION_CODENAME.UP1A := UpsideDownCake
+PLATFORM_VERSION_CODENAME.VP1A := VanillaIceCream
 
 # This is the user-visible version.  In a final release build it should
 # be empty to use PLATFORM_VERSION as the user-visible version.  For
@@ -90,7 +90,7 @@
 Base Base11 Cupcake Donut Eclair Eclair01 EclairMr1 Froyo Gingerbread GingerbreadMr1 \
 Honeycomb HoneycombMr1 HoneycombMr2 IceCreamSandwich IceCreamSandwichMr1 \
 JellyBean JellyBeanMr1 JellyBeanMr2 Kitkat KitkatWatch Lollipop LollipopMr1 M N NMr1 O OMr1 P \
-Q R S Sv2 Tiramisu UpsideDownCake
+Q R S Sv2 Tiramisu UpsideDownCake VanillaIceCream
 
 # Convert from space separated list to comma separated
 PLATFORM_VERSION_KNOWN_CODENAMES := \
@@ -103,7 +103,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-    PLATFORM_SECURITY_PATCH := 2023-02-05
+    PLATFORM_SECURITY_PATCH := 2023-05-05
 endif
 
 include $(BUILD_SYSTEM)/version_util.mk
diff --git a/core/version_util.mk b/core/version_util.mk
index cbfef96..d3fcdc2 100644
--- a/core/version_util.mk
+++ b/core/version_util.mk
@@ -14,17 +14,17 @@
 # limitations under the License.
 #
 
-#
-
 ALLOWED_VERSIONS := $(call allowed-platform-versions,\
   $(MIN_PLATFORM_VERSION),\
   $(MAX_PLATFORM_VERSION),\
-  $(DEFAULT_PLATFORM_VERSION))
+  $(RELEASE_PLATFORM_VERSION))
 
-ifndef TARGET_PLATFORM_VERSION
-  TARGET_PLATFORM_VERSION := $(DEFAULT_PLATFORM_VERSION)
+ifdef TARGET_PLATFORM_VERSION
+  $(error Do not set TARGET_PLATFORM_VERSION directly. Use RELEASE_PLATFORM_VERSION. value: $(TARGET_PLATFORM_VERSION))
 endif
 
+TARGET_PLATFORM_VERSION := $(RELEASE_PLATFORM_VERSION)
+
 ifeq (,$(filter $(ALLOWED_VERSIONS), $(TARGET_PLATFORM_VERSION)))
   $(warning Invalid TARGET_PLATFORM_VERSION '$(TARGET_PLATFORM_VERSION)', must be one of)
   $(error $(ALLOWED_VERSIONS))
@@ -80,13 +80,28 @@
   $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_CODENAMES)),,\
     $(eval PLATFORM_VERSION_ALL_CODENAMES += $(_codename))))
 
+# And the list of actually all the codenames that are in preview. The
+# ALL_CODENAMES variable is sort of a lie for historical reasons and only
+# includes codenames up to and including the currently active codename, whereas
+# this variable also includes future codenames. For example, while AOSP is still
+# merging into U, but V development has started, ALL_CODENAMES will only be U,
+# but ALL_PREVIEW_CODENAMES will be U and V.
+PLATFORM_VERSION_ALL_PREVIEW_CODENAMES :=
+$(foreach version,$(ALL_VERSIONS),\
+  $(eval _codename := $(PLATFORM_VERSION_CODENAME.$(version)))\
+  $(if $(filter $(_codename),$(PLATFORM_VERSION_ALL_PREVIEW_CODENAMES)),,\
+    $(eval PLATFORM_VERSION_ALL_PREVIEW_CODENAMES += $(_codename))))
+
 # And convert from space separated to comma separated.
 PLATFORM_VERSION_ALL_CODENAMES := \
   $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_CODENAMES)))
+PLATFORM_VERSION_ALL_PREVIEW_CODENAMES := \
+  $(subst $(space),$(comma),$(strip $(PLATFORM_VERSION_ALL_PREVIEW_CODENAMES)))
 
 .KATI_READONLY := \
   PLATFORM_VERSION_CODENAME \
-  PLATFORM_VERSION_ALL_CODENAMES
+  PLATFORM_VERSION_ALL_CODENAMES \
+  PLATFORM_VERSION_ALL_PREVIEW_CODENAMES
 
 ifneq (REL,$(PLATFORM_VERSION_CODENAME))
   codenames := \
@@ -231,21 +246,10 @@
 # to soong_ui.
 $(KATI_obsolete_var BUILD_DATETIME,Use BUILD_DATETIME_FROM_FILE)
 
-HAS_BUILD_NUMBER := true
-ifndef BUILD_NUMBER
-  # BUILD_NUMBER should be set to the source control value that
-  # represents the current state of the source code.  E.g., a
-  # perforce changelist number or a git hash.  Can be an arbitrary string
-  # (to allow for source control that uses something other than numbers),
-  # but must be a single word and a valid file name.
-  #
-  # If no BUILD_NUMBER is set, create a useful "I am an engineering build
-  # from this date/time" value.  Make it start with a non-digit so that
-  # anyone trying to parse it as an integer will probably get "0".
-  BUILD_NUMBER := eng.$(shell echo $${BUILD_USERNAME:0:6}).$(shell $(DATE) +%Y%m%d.%H%M%S)
+ifndef HAS_BUILD_NUMBER
   HAS_BUILD_NUMBER := false
 endif
-.KATI_READONLY := BUILD_NUMBER HAS_BUILD_NUMBER
+.KATI_READONLY := HAS_BUILD_NUMBER
 
 ifndef PLATFORM_MIN_SUPPORTED_TARGET_SDK_VERSION
   # Used to set minimum supported target sdk version. Apps targeting sdk
diff --git a/envsetup.sh b/envsetup.sh
index 905635c..d292dbb 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -312,7 +312,7 @@
     # would prevent exporting type info from those packages.
     #
     # http://b/266688086
-    export ANDROID_PYTHONPATH=$T/development/python-packages/adb:$T/development/python-packages:
+    export ANDROID_PYTHONPATH=$T/development/python-packages/adb:$T/development/python-packages/gdbrunner:$T/development/python-packages:
     if [ -n $VENDOR_PYTHONPATH ]; then
         ANDROID_PYTHONPATH=$ANDROID_PYTHONPATH$VENDOR_PYTHONPATH
     fi
@@ -804,13 +804,19 @@
 
     export TARGET_BUILD_APPS=
 
-    local product variant_and_version variant version
+    # Support either <product>-<variant> or <product>-<release>-<variant>
+    local product release_and_variant release variant
     product=${selection%%-*} # Trim everything after first dash
-    variant_and_version=${selection#*-} # Trim everything up to first dash
-    if [ "$variant_and_version" != "$selection" ]; then
-        variant=${variant_and_version%%-*}
-        if [ "$variant" != "$variant_and_version" ]; then
-            version=${variant_and_version#*-}
+    release_and_variant=${selection#*-} # Trim everything up to first dash
+    if [ "$release_and_variant" != "$selection" ]; then
+        local first=${release_and_variant%%-*} # Trim everything after first dash
+        if [ "$first" != "$release_and_variant" ]; then
+            # There is a 2nd dash, split into release-variant
+            release=$first # Everything up to the dash
+            variant=${release_and_variant#*-} # Trim everything up to dash
+        else
+            # There is not a 2nd dash, default to variant as the second param
+            variant=$first
         fi
     fi
 
@@ -823,7 +829,7 @@
 
     TARGET_PRODUCT=$product \
     TARGET_BUILD_VARIANT=$variant \
-    TARGET_PLATFORM_VERSION=$version \
+    TARGET_RELEASE=$release \
     build_build_var_cache
     if [ $? -ne 0 ]
     then
@@ -835,10 +841,10 @@
     fi
     export TARGET_PRODUCT=$(get_build_var TARGET_PRODUCT)
     export TARGET_BUILD_VARIANT=$(get_build_var TARGET_BUILD_VARIANT)
-    if [ -n "$version" ]; then
-      export TARGET_PLATFORM_VERSION=$(get_build_var TARGET_PLATFORM_VERSION)
+    if [ -n "$release" ]; then
+      export TARGET_RELEASE=$release
     else
-      unset TARGET_PLATFORM_VERSION
+      unset TARGET_RELEASE
     fi
     export TARGET_BUILD_TYPE=release
 
@@ -1096,12 +1102,12 @@
 #
 # Easy way to make system.img/etc writable
 function syswrite() {
-  adb wait-for-device && adb root || return 1
+  adb wait-for-device && adb root && adb wait-for-device || return 1
   if [[ $(adb disable-verity | grep -i "reboot") ]]; then
       echo "rebooting"
-      adb reboot && adb wait-for-device && adb root || return 1
+      adb reboot && adb wait-for-device && adb root && adb wait-for-device || return 1
   fi
-  adb wait-for-device && adb remount || return 1
+  adb remount || return 1
 }
 
 # coredump_setup - enable core dumps globally for any process
diff --git a/packaging/distdir.mk b/packaging/distdir.mk
index 264a8b0..c9508af 100644
--- a/packaging/distdir.mk
+++ b/packaging/distdir.mk
@@ -18,10 +18,12 @@
 DIST_GOAL_OUTPUT_PAIRS :=
 DIST_SRC_DST_PAIRS :=
 include $(KATI_PACKAGE_MK_DIR)/dist.mk
+FILE_NAME_TAG := $(file <$(OUT_DIR)/file_name_tag.txt)
+.KATI_READONLY := FILE_NAME_TAG
 
 $(foreach pair,$(DIST_GOAL_OUTPUT_PAIRS), \
   $(eval goal := $(call word-colon,1,$(pair))) \
-  $(eval output := $(call word-colon,2,$(pair))) \
+  $(eval output := $(subst FILE_NAME_TAG_PLACEHOLDER,$(FILE_NAME_TAG),$(call word-colon,2,$(pair)))) \
   $(eval .PHONY: _dist_$$(goal)) \
   $(if $(call streq,$(DIST),true),\
     $(eval _dist_$$(goal): $$(DIST_DIR)/$$(output)), \
@@ -37,7 +39,7 @@
 ifeq ($(DIST),true)
   $(foreach pair,$(DIST_SRC_DST_PAIRS), \
     $(eval src := $(call word-colon,1,$(pair))) \
-    $(eval dst := $(DIST_DIR)/$(call word-colon,2,$(pair))) \
+    $(eval dst := $(subst FILE_NAME_TAG_PLACEHOLDER,$(FILE_NAME_TAG),$(DIST_DIR)/$(call word-colon,2,$(pair)))) \
     $(eval $(call copy-one-dist-file,$(src),$(dst))))
 endif
 
diff --git a/target/board/BoardConfigEmuCommon.mk b/target/board/BoardConfigEmuCommon.mk
index 7a07d70..6ed08f0 100644
--- a/target/board/BoardConfigEmuCommon.mk
+++ b/target/board/BoardConfigEmuCommon.mk
@@ -26,51 +26,34 @@
 # Emulator doesn't support sparse image format.
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
 
-ifeq ($(PRODUCT_USE_DYNAMIC_PARTITIONS),true)
-  # emulator is Non-A/B device
-  AB_OTA_UPDATER := false
+# emulator is Non-A/B device
+AB_OTA_UPDATER := false
 
-  # emulator needs super.img
-  BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT := true
+# emulator needs super.img
+BOARD_BUILD_SUPER_IMAGE_BY_DEFAULT := true
 
-  # 8G + 8M
-  BOARD_SUPER_PARTITION_SIZE ?= 8598323200
-  BOARD_SUPER_PARTITION_GROUPS := emulator_dynamic_partitions
+# 8G + 8M
+BOARD_SUPER_PARTITION_SIZE ?= 8598323200
+BOARD_SUPER_PARTITION_GROUPS := emulator_dynamic_partitions
 
-  ifeq ($(QEMU_USE_SYSTEM_EXT_PARTITIONS),true)
-    BOARD_EMULATOR_DYNAMIC_PARTITIONS_PARTITION_LIST := \
-        system \
-        system_ext \
-        product \
-        vendor
+BOARD_EMULATOR_DYNAMIC_PARTITIONS_PARTITION_LIST := \
+  system \
+  system_dlkm \
+  system_ext \
+  product \
+  vendor
 
-    TARGET_COPY_OUT_PRODUCT := product
-    BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
-    TARGET_COPY_OUT_SYSTEM_EXT := system_ext
-    BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE := ext4
-  else
-    TARGET_COPY_OUT_PRODUCT := system/product
-    TARGET_COPY_OUT_SYSTEM_EXT := system/system_ext
-    BOARD_EMULATOR_DYNAMIC_PARTITIONS_PARTITION_LIST := \
-        system \
-        vendor
-  endif
+TARGET_COPY_OUT_PRODUCT := product
+BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
+TARGET_COPY_OUT_SYSTEM_EXT := system_ext
+BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE := ext4
 
-  # 8G
-  BOARD_EMULATOR_DYNAMIC_PARTITIONS_SIZE ?= 8589934592
+BOARD_USES_SYSTEM_DLKMIMAGE := true
+BOARD_SYSTEM_DLKMIMAGE_FILE_SYSTEM_TYPE := erofs
+TARGET_COPY_OUT_SYSTEM_DLKM := system_dlkm
 
-  # in build environment to speed up make -j
-  ifeq ($(QEMU_DISABLE_AVB),true)
-    BOARD_AVB_ENABLE := false
-  endif
-else ifeq ($(PRODUCT_USE_DYNAMIC_PARTITION_SIZE),true)
-  # Enable dynamic system image size and reserved 64MB in it.
-  BOARD_SYSTEMIMAGE_PARTITION_RESERVED_SIZE := 67108864
-  BOARD_VENDORIMAGE_PARTITION_RESERVED_SIZE := 67108864
-else
-  BOARD_SYSTEMIMAGE_PARTITION_SIZE := 3221225472
-  BOARD_VENDORIMAGE_PARTITION_SIZE := 146800640
-endif
+# 8G
+BOARD_EMULATOR_DYNAMIC_PARTITIONS_SIZE ?= 8589934592
 
 #vendor boot
 BOARD_INCLUDE_DTB_IN_BOOTIMG := false
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 4d95b33..67e31df 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -36,6 +36,7 @@
 TARGET_COPY_OUT_PRODUCT := system/product
 TARGET_COPY_OUT_SYSTEM_EXT := system/system_ext
 BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE :=
+BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE :=
 
 # Creates metadata partition mount point under root for
 # the devices with metadata parition
diff --git a/target/board/BoardConfigMainlineCommon.mk b/target/board/BoardConfigMainlineCommon.mk
index 00f6e5b..01ebe56 100644
--- a/target/board/BoardConfigMainlineCommon.mk
+++ b/target/board/BoardConfigMainlineCommon.mk
@@ -14,6 +14,8 @@
 TARGET_COPY_OUT_SYSTEM_EXT := system_ext
 TARGET_COPY_OUT_VENDOR := vendor
 TARGET_COPY_OUT_PRODUCT := product
+BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE := ext4
 
 # Creates metadata partition mount point under root for
 # the devices with metadata parition
@@ -22,9 +24,6 @@
 # Default is current, but allow devices to override vndk version if needed.
 BOARD_VNDK_VERSION ?= current
 
-# Required flag for non-64 bit devices from P.
-TARGET_USES_64_BIT_BINDER := true
-
 # 64 bit mediadrmserver
 TARGET_ENABLE_MEDIADRM_64 := true
 
diff --git a/target/board/BoardConfigModuleCommon.mk b/target/board/BoardConfigModuleCommon.mk
deleted file mode 100644
index 24c01a5..0000000
--- a/target/board/BoardConfigModuleCommon.mk
+++ /dev/null
@@ -1,6 +0,0 @@
-# BoardConfigModuleCommon.mk
-#
-# Common compile-time settings for module builds.
-
-# Required for all module devices.
-TARGET_USES_64_BIT_BINDER := true
diff --git a/target/board/emulator_arm64/device.mk b/target/board/emulator_arm64/device.mk
index dc84192..d221e64 100644
--- a/target/board/emulator_arm64/device.mk
+++ b/target/board/emulator_arm64/device.mk
@@ -17,12 +17,3 @@
 PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
 PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
 
-# Cuttlefish has GKI kernel prebuilts, so use those for the GKI boot.img.
-ifeq ($(TARGET_PREBUILT_KERNEL),)
-    LOCAL_KERNEL := kernel/prebuilts/5.4/arm64/kernel-5.4-lz4
-else
-    LOCAL_KERNEL := $(TARGET_PREBUILT_KERNEL)
-endif
-
-PRODUCT_COPY_FILES += \
-    $(LOCAL_KERNEL):kernel
diff --git a/target/board/generic_64bitonly_x86_64/BoardConfig.mk b/target/board/generic_64bitonly_x86_64/BoardConfig.mk
index 71c4357..a240eab 100644
--- a/target/board/generic_64bitonly_x86_64/BoardConfig.mk
+++ b/target/board/generic_64bitonly_x86_64/BoardConfig.mk
@@ -26,7 +26,10 @@
 TARGET_2ND_ARCH_VARIANT := x86_64
 
 TARGET_PRELINK_MODULE := false
+
 include build/make/target/board/BoardConfigGsiCommon.mk
+
+ifndef BUILDING_GSI
 include build/make/target/board/BoardConfigEmuCommon.mk
 
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
@@ -43,3 +46,5 @@
 WIFI_DRIVER_FW_PATH_PARAM   := "/dev/null"
 WIFI_DRIVER_FW_PATH_STA     := "/dev/null"
 WIFI_DRIVER_FW_PATH_AP      := "/dev/null"
+
+endif # !BUILDING_GSI
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 40be80e..e2d5fb4 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -54,6 +54,8 @@
 
 # Include 64-bit mediaserver to support 64-bit only devices
 TARGET_DYNAMIC_64_32_MEDIASERVER := true
+# Include 64-bit drmserver to support 64-bit only devices
+TARGET_DYNAMIC_64_32_DRMSERVER := true
 
 include build/make/target/board/BoardConfigGsiCommon.mk
 
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index e7f2ae0..36136f4 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -24,6 +24,8 @@
 
 # Include 64-bit mediaserver to support 64-bit only devices
 TARGET_DYNAMIC_64_32_MEDIASERVER := true
+# Include 64-bit drmserver to support 64-bit only devices
+TARGET_DYNAMIC_64_32_DRMSERVER := true
 
 include build/make/target/board/BoardConfigGsiCommon.mk
 
diff --git a/target/board/gsi_arm64/BoardConfig.mk b/target/board/gsi_arm64/BoardConfig.mk
index db95082..7910b1d 100644
--- a/target/board/gsi_arm64/BoardConfig.mk
+++ b/target/board/gsi_arm64/BoardConfig.mk
@@ -29,6 +29,8 @@
 
 # Include 64-bit mediaserver to support 64-bit only devices
 TARGET_DYNAMIC_64_32_MEDIASERVER := true
+# Include 64-bit drmserver to support 64-bit only devices
+TARGET_DYNAMIC_64_32_DRMSERVER := true
 
 # TODO(b/111434759, b/111287060) SoC specific hacks
 BOARD_ROOT_EXTRA_SYMLINKS += /vendor/lib/dsp:/dsp
diff --git a/target/board/linux_bionic/BoardConfig.mk b/target/board/linux_bionic/BoardConfig.mk
index 7938bdb..7fca911 100644
--- a/target/board/linux_bionic/BoardConfig.mk
+++ b/target/board/linux_bionic/BoardConfig.mk
@@ -17,7 +17,11 @@
 # (device) target architectures are irrelevant. However, the build system isn't
 # prepared to handle no target architectures at all, so pick something
 # arbitrarily.
-TARGET_ARCH_SUITE := ndk
+TARGET_ARCH := arm
+TARGET_ARCH_VARIANT := armv7-a-neon
+TARGET_CPU_VARIANT := generic
+TARGET_CPU_ABI := armeabi-v7a
+TARGET_CPU_ABI2 := armeabi
 
 HOST_CROSS_OS := linux_bionic
 HOST_CROSS_ARCH := x86_64
diff --git a/target/board/mainline_sdk/BoardConfig.mk b/target/board/mainline_sdk/BoardConfig.mk
index f5c2dc6..84f8b2d 100644
--- a/target/board/mainline_sdk/BoardConfig.mk
+++ b/target/board/mainline_sdk/BoardConfig.mk
@@ -18,6 +18,3 @@
 HOST_CROSS_OS := linux_bionic
 HOST_CROSS_ARCH := x86_64
 HOST_CROSS_2ND_ARCH :=
-
-# Required flag for non-64 bit devices from P.
-TARGET_USES_64_BIT_BINDER := true
diff --git a/target/board/module_arm/BoardConfig.mk b/target/board/module_arm/BoardConfig.mk
index 3f35c06..565efc8 100644
--- a/target/board/module_arm/BoardConfig.mk
+++ b/target/board/module_arm/BoardConfig.mk
@@ -13,8 +13,6 @@
 # limitations under the License.
 #
 
-include build/make/target/board/BoardConfigModuleCommon.mk
-
 TARGET_ARCH := arm
 TARGET_ARCH_VARIANT := armv7-a-neon
 TARGET_CPU_VARIANT := generic
diff --git a/target/board/module_arm64/BoardConfig.mk b/target/board/module_arm64/BoardConfig.mk
index 3700056..66e3792 100644
--- a/target/board/module_arm64/BoardConfig.mk
+++ b/target/board/module_arm64/BoardConfig.mk
@@ -13,8 +13,6 @@
 # limitations under the License.
 #
 
-include build/make/target/board/BoardConfigModuleCommon.mk
-
 TARGET_ARCH := arm64
 TARGET_ARCH_VARIANT := armv8-a
 TARGET_CPU_VARIANT := generic
diff --git a/target/board/module_arm64only/BoardConfig.mk b/target/board/module_arm64only/BoardConfig.mk
index 3cabf05..6c26579 100644
--- a/target/board/module_arm64only/BoardConfig.mk
+++ b/target/board/module_arm64only/BoardConfig.mk
@@ -13,8 +13,6 @@
 # limitations under the License.
 #
 
-include build/make/target/board/BoardConfigModuleCommon.mk
-
 TARGET_ARCH := arm64
 TARGET_ARCH_VARIANT := armv8-a
 TARGET_CPU_VARIANT := generic
diff --git a/target/board/module_x86/BoardConfig.mk b/target/board/module_x86/BoardConfig.mk
index a93ac97..af3fffd 100644
--- a/target/board/module_x86/BoardConfig.mk
+++ b/target/board/module_x86/BoardConfig.mk
@@ -13,8 +13,6 @@
 # limitations under the License.
 #
 
-include build/make/target/board/BoardConfigModuleCommon.mk
-
 TARGET_CPU_ABI := x86
 TARGET_ARCH := x86
 TARGET_ARCH_VARIANT := x86
diff --git a/target/board/module_x86_64/BoardConfig.mk b/target/board/module_x86_64/BoardConfig.mk
index 1ed3be0..1ada027 100644
--- a/target/board/module_x86_64/BoardConfig.mk
+++ b/target/board/module_x86_64/BoardConfig.mk
@@ -13,8 +13,6 @@
 # limitations under the License.
 #
 
-include build/make/target/board/BoardConfigModuleCommon.mk
-
 TARGET_CPU_ABI := x86_64
 TARGET_ARCH := x86_64
 TARGET_ARCH_VARIANT := x86_64
diff --git a/target/board/module_x86_64only/BoardConfig.mk b/target/board/module_x86_64only/BoardConfig.mk
index b0676cb..5b86f0a 100644
--- a/target/board/module_x86_64only/BoardConfig.mk
+++ b/target/board/module_x86_64only/BoardConfig.mk
@@ -13,8 +13,6 @@
 # limitations under the License.
 #
 
-include build/make/target/board/BoardConfigModuleCommon.mk
-
 TARGET_CPU_ABI := x86_64
 TARGET_ARCH := x86_64
 TARGET_ARCH_VARIANT := x86_64
diff --git a/target/board/ndk/BoardConfig.mk b/target/board/ndk/BoardConfig.mk
index da8b5f3..b485f8b 100644
--- a/target/board/ndk/BoardConfig.mk
+++ b/target/board/ndk/BoardConfig.mk
@@ -14,7 +14,6 @@
 #
 
 TARGET_ARCH_SUITE := ndk
-TARGET_USES_64_BIT_BINDER := true
 
 MALLOC_SVELTE := true
 
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index 1e0ce19..133dc73 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -35,6 +35,7 @@
 ifneq ($(TARGET_BUILD_APPS),)
 PRODUCT_MAKEFILES := \
     $(LOCAL_DIR)/aosp_arm64.mk \
+    $(LOCAL_DIR)/aosp_arm64_fullmte.mk \
     $(LOCAL_DIR)/aosp_arm.mk \
     $(LOCAL_DIR)/aosp_riscv64.mk \
     $(LOCAL_DIR)/aosp_x86_64.mk \
@@ -46,6 +47,7 @@
 PRODUCT_MAKEFILES := \
     $(LOCAL_DIR)/aosp_64bitonly_x86_64.mk \
     $(LOCAL_DIR)/aosp_arm64.mk \
+    $(LOCAL_DIR)/aosp_arm64_fullmte.mk \
     $(LOCAL_DIR)/aosp_arm.mk \
     $(LOCAL_DIR)/aosp_riscv64.mk \
     $(LOCAL_DIR)/aosp_x86_64.mk \
diff --git a/target/product/angle_default.mk b/target/product/angle_default.mk
new file mode 100644
index 0000000..bea0be6
--- /dev/null
+++ b/target/product/angle_default.mk
@@ -0,0 +1,23 @@
+#
+# Copyright 2023 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# To enable ANGLE as the default system GLES drivers, add
+# $(call inherit-product, $(SRC_TARGET_DIR)/product/angle_default.mk) to the Makefile.
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/angle_supported.mk)
+
+PRODUCT_VENDOR_PROPERTIES += \
+    persist.graphics.egl=angle
diff --git a/target/product/angle_supported.mk b/target/product/angle_supported.mk
new file mode 100644
index 0000000..c83ff5f
--- /dev/null
+++ b/target/product/angle_supported.mk
@@ -0,0 +1,27 @@
+#
+# Copyright 2023 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# To include ANGLE into the image build, add
+# $(call inherit-product, $(SRC_TARGET_DIR)/product/angle_supported.mk) to the Makefile.
+# By default, this will allow ANGLE binaries to coexist with native GLES drivers.
+
+PRODUCT_PACKAGES += \
+    libEGL_angle \
+    libGLESv1_CM_angle \
+    libGLESv2_angle
+
+# Set ro.gfx.angle.supported based on whether ANGLE is installed in the vendor partition
+PRODUCT_VENDOR_PROPERTIES += ro.gfx.angle.supported=true
diff --git a/target/product/aosp_64bitonly_x86_64.mk b/target/product/aosp_64bitonly_x86_64.mk
index 4de4e0c..75fd3c8 100644
--- a/target/product/aosp_64bitonly_x86_64.mk
+++ b/target/product/aosp_64bitonly_x86_64.mk
@@ -51,7 +51,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
 
@@ -59,6 +58,9 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_64bitonly_x86_64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
diff --git a/target/product/aosp_arm.mk b/target/product/aosp_arm.mk
index 5f200aa..61c1316 100644
--- a/target/product/aosp_arm.mk
+++ b/target/product/aosp_arm.mk
@@ -57,6 +57,9 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_arm,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index ffc37a9..6c907db 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -62,6 +62,9 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_arm64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
diff --git a/target/product/aosp_arm64_fullmte.mk b/target/product/aosp_arm64_fullmte.mk
new file mode 100644
index 0000000..ed6bd4a
--- /dev/null
+++ b/target/product/aosp_arm64_fullmte.mk
@@ -0,0 +1,27 @@
+# Copyright (C) 2023 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+include $(SRC_TARGET_DIR)/product/fullmte.mk
+
+PRODUCT_ENFORCE_ARTIFACT_PATH_REQUIREMENTS := relaxed
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_arm64.mk)
+
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
+
+PRODUCT_NAME := aosp_arm64_fullmte
diff --git a/target/product/aosp_riscv64.mk b/target/product/aosp_riscv64.mk
index 1261fb1..270a989 100644
--- a/target/product/aosp_riscv64.mk
+++ b/target/product/aosp_riscv64.mk
@@ -30,8 +30,7 @@
 
 # GSI for system/product & support 64-bit apps only
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
-#$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
-TARGET_FLATTEN_APEX := false
+$(call inherit-product, $(SRC_TARGET_DIR)/product/mainline_system.mk)
 
 #
 # All components inherited here go to system_ext image
@@ -47,7 +46,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/riscv64-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_riscv64/device.mk)
 
@@ -55,30 +53,21 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_riscv64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
-# TODO: this list should come via mainline_system.mk, but for now list
-# just the modules that work for riscv64.
-PRODUCT_PACKAGES := \
-  init.environ.rc \
-  init_first_stage \
-  init_system \
-  linker \
-  shell_and_utilities \
-  com.android.art \
-  com.android.conscrypt \
-  com.android.i18n \
-  com.android.runtime \
-  com.android.tzdata \
-  com.android.os.statsd \
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/default_art_config.mk)
-PRODUCT_USES_DEFAULT_ART_CONFIG := false
-
 PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
     root/init.zygote64.rc
 
+# TODO(b/206676167): This property can be removed when renderscript is removed.
+# Prevents framework from attempting to load renderscript libraries, which are
+# not supported on this architecture.
+PRODUCT_SYSTEM_PROPERTIES += \
+    config.disable_renderscript=1 \
+
 # This build configuration supports 64-bit apps only
 PRODUCT_NAME := aosp_riscv64
 PRODUCT_DEVICE := generic_riscv64
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 7db2c0f..a2f0390 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -47,7 +47,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
 
@@ -56,6 +55,9 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_x86,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index d55866f..535ee3f 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -56,7 +56,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/non_ab_device.mk)
@@ -65,6 +64,9 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_x86_64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
diff --git a/target/product/aosp_x86_arm.mk b/target/product/aosp_x86_arm.mk
index f96e068..39ad0d8 100644
--- a/target/product/aosp_x86_arm.mk
+++ b/target/product/aosp_x86_arm.mk
@@ -45,7 +45,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_arm/device.mk)
 
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 94b5c16..a23fdd5 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -226,7 +226,7 @@
     mtpd \
     ndc \
     netd \
-    NetworkStackNext \
+    NetworkStack \
     odsign \
     org.apache.http.legacy \
     otacerts \
@@ -265,7 +265,6 @@
     sm \
     snapshotctl \
     snapuserd \
-    SoundPicker \
     storaged \
     surfaceflinger \
     svc \
@@ -291,16 +290,21 @@
     wifi.rc \
     wm \
 
+# These packages are not used on Android TV
+ifneq ($(PRODUCT_IS_ATV),true)
+  PRODUCT_PACKAGES += \
+      SoundPicker \
+
+endif
+
 # VINTF data for system image
 PRODUCT_PACKAGES += \
     system_manifest.xml \
     system_compatibility_matrix.xml \
 
-# HWASAN runtime for SANITIZE_TARGET=hwaddress builds
-ifneq (,$(filter hwaddress,$(SANITIZE_TARGET)))
-  PRODUCT_PACKAGES += \
-   libclang_rt.hwasan.bootstrap
-endif
+PRODUCT_PACKAGES_ARM64 := libclang_rt.hwasan \
+ libclang_rt.hwasan.bootstrap \
+ libc_hwasan \
 
 # Jacoco agent JARS to be built and installed, if any.
 ifeq ($(EMMA_INSTRUMENT),true)
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 752b199..1e28c80 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -115,4 +115,4 @@
     dalvik.vm.dex2oat-Xms=64m \
     dalvik.vm.dex2oat-Xmx=512m \
 
-PRODUCT_ENABLE_UFFD_GC := false  # TODO(jiakaiz): Change this to "default".
+PRODUCT_ENABLE_UFFD_GC := default
diff --git a/target/product/fullmte.mk b/target/product/fullmte.mk
new file mode 100644
index 0000000..d47c685
--- /dev/null
+++ b/target/product/fullmte.mk
@@ -0,0 +1,26 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Enables more comprehensive detection of memory errors on hardware that
+# supports the ARM Memory Tagging Extension (MTE), by building the image with
+# MTE stack instrumentation and forcing MTE on in SYNC mode in all processes.
+# For more details, see:
+# https://source.android.com/docs/security/test/memory-safety/arm-mte
+ifeq ($(filter memtag_heap,$(SANITIZE_TARGET)),)
+  SANITIZE_TARGET := $(strip $(SANITIZE_TARGET) memtag_heap memtag_stack)
+  SANITIZE_TARGET_DIAG := $(strip $(SANITIZE_TARGET_DIAG) memtag_heap)
+endif
+PRODUCT_PRODUCT_PROPERTIES += persist.arm64.memtag.default=sync
diff --git a/target/product/gsi/33.txt b/target/product/gsi/33.txt
index db05f93..03a143d 100644
--- a/target/product/gsi/33.txt
+++ b/target/product/gsi/33.txt
@@ -79,6 +79,7 @@
 VNDK-core: android.hardware.graphics.allocator@4.0.so
 VNDK-core: android.hardware.graphics.bufferqueue@1.0.so
 VNDK-core: android.hardware.graphics.bufferqueue@2.0.so
+VNDK-core: android.hardware.health-V1-ndk.so
 VNDK-core: android.hardware.health.storage-V1-ndk.so
 VNDK-core: android.hardware.identity-V4-ndk.so
 VNDK-core: android.hardware.ir-V1-ndk.so
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 107c94f..86d4622 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -33,9 +33,6 @@
 check-vndk-list: ;
 else ifeq ($(TARGET_SKIP_CURRENT_VNDK),true)
 check-vndk-list: ;
-else ifeq ($(BOARD_VNDK_VERSION),)
-# b/143233626 do not check vndk-list when vndk libs are not built
-check-vndk-list: ;
 else
 check-vndk-list: $(check-vndk-list-timestamp)
 ifneq ($(SKIP_ABI_CHECKS),true)
@@ -172,8 +169,6 @@
 #####################################################################
 # VNDK package and snapshot.
 
-ifneq ($(BOARD_VNDK_VERSION),)
-
 include $(CLEAR_VARS)
 LOCAL_MODULE := vndk_package
 LOCAL_LICENSE_KINDS := SPDX-license-identifier-Apache-2.0
@@ -214,8 +209,6 @@
 
 _vndk_versions :=
 
-endif # BOARD_VNDK_VERSION is set
-
 #####################################################################
 # skip_mount.cfg, read by init to skip mounting some partitions when GSI is used.
 
diff --git a/target/product/gsi/current.txt b/target/product/gsi/current.txt
index 474cb20..2df85e5 100644
--- a/target/product/gsi/current.txt
+++ b/target/product/gsi/current.txt
@@ -21,6 +21,7 @@
 LLNDK: libvulkan.so
 VNDK-SP: android.hardware.common-V2-ndk.so
 VNDK-SP: android.hardware.common.fmq-V1-ndk.so
+VNDK-SP: android.hardware.graphics.allocator-V2-ndk.so
 VNDK-SP: android.hardware.graphics.common-V4-ndk.so
 VNDK-SP: android.hardware.graphics.common@1.0.so
 VNDK-SP: android.hardware.graphics.common@1.1.so
@@ -30,7 +31,6 @@
 VNDK-SP: android.hardware.graphics.mapper@2.1.so
 VNDK-SP: android.hardware.graphics.mapper@3.0.so
 VNDK-SP: android.hardware.graphics.mapper@4.0.so
-VNDK-SP: android.hardware.graphics.allocator-V2-ndk.so
 VNDK-SP: android.hardware.renderscript@1.0.so
 VNDK-SP: android.hidl.memory.token@1.0.so
 VNDK-SP: android.hidl.memory@1.0-impl.so
@@ -90,7 +90,6 @@
 VNDK-core: libcrypto.so
 VNDK-core: libcrypto_utils.so
 VNDK-core: libcurl.so
-VNDK-core: libdiskconfig.so
 VNDK-core: libdumpstateutil.so
 VNDK-core: libevent.so
 VNDK-core: libexif.so
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 09d4bc9..3b97792 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -36,7 +36,7 @@
 # GSI should always support up-to-date platform features.
 # Keep this value at the latest API level to ensure latest build system
 # default configs are applied.
-PRODUCT_SHIPPING_API_LEVEL := 31
+PRODUCT_SHIPPING_API_LEVEL := 34
 
 # Enable dynamic partitions to facilitate mixing onto Cuttlefish
 PRODUCT_USE_DYNAMIC_PARTITIONS := true
@@ -88,9 +88,6 @@
 PRODUCT_BUILD_SYSTEM_DLKM_IMAGE := false
 PRODUCT_EXPORT_BOOT_IMAGE_TO_DIST := true
 
-# Always build modules from source
-MODULE_BUILD_FROM_SOURCE := true
-
 # Additional settings used in all GSI builds
 PRODUCT_PRODUCT_PROPERTIES += \
     ro.crypto.metadata_init_delete_all_keys.enabled=false \
diff --git a/target/product/handheld_system.mk b/target/product/handheld_system.mk
index 41233b2..d965367 100644
--- a/target/product/handheld_system.mk
+++ b/target/product/handheld_system.mk
@@ -27,6 +27,7 @@
 $(call inherit-product-if-exists, external/google-fonts/source-sans-pro/fonts.mk)
 $(call inherit-product-if-exists, external/noto-fonts/fonts.mk)
 $(call inherit-product-if-exists, external/roboto-fonts/fonts.mk)
+$(call inherit-product-if-exists, external/roboto-flex-fonts/fonts.mk)
 $(call inherit-product-if-exists, external/hyphenation-patterns/patterns.mk)
 $(call inherit-product-if-exists, frameworks/base/data/keyboards/keyboards.mk)
 $(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
diff --git a/target/product/module_arm.mk b/target/product/module_arm.mk
index d99dce8..434f7ad 100644
--- a/target/product/module_arm.mk
+++ b/target/product/module_arm.mk
@@ -17,5 +17,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
 
 PRODUCT_NAME := module_arm
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_arm
diff --git a/target/product/module_arm64.mk b/target/product/module_arm64.mk
index fc9529c..2e8c8a7 100644
--- a/target/product/module_arm64.mk
+++ b/target/product/module_arm64.mk
@@ -18,5 +18,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
 
 PRODUCT_NAME := module_arm64
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_arm64
diff --git a/target/product/module_arm64only.mk b/target/product/module_arm64only.mk
index 4e8d53e..c0769bf 100644
--- a/target/product/module_arm64only.mk
+++ b/target/product/module_arm64only.mk
@@ -18,5 +18,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
 
 PRODUCT_NAME := module_arm64only
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_arm64only
diff --git a/target/product/module_common.mk b/target/product/module_common.mk
index ec670ee..84bd799 100644
--- a/target/product/module_common.mk
+++ b/target/product/module_common.mk
@@ -30,3 +30,5 @@
 ifneq (,$(strip $(wildcard frameworks/base/Android.bp)))
   UNBUNDLED_BUILD_SDKS_FROM_SOURCE := true
 endif
+
+PRODUCT_BRAND := Android
diff --git a/target/product/module_x86.mk b/target/product/module_x86.mk
index b852e7a..f38e2b9 100644
--- a/target/product/module_x86.mk
+++ b/target/product/module_x86.mk
@@ -17,5 +17,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/module_common.mk)
 
 PRODUCT_NAME := module_x86
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_x86
diff --git a/target/product/module_x86_64.mk b/target/product/module_x86_64.mk
index f6bc1fc..20f443a 100644
--- a/target/product/module_x86_64.mk
+++ b/target/product/module_x86_64.mk
@@ -18,5 +18,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
 
 PRODUCT_NAME := module_x86_64
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_x86_64
diff --git a/target/product/module_x86_64only.mk b/target/product/module_x86_64only.mk
index bca4541..b0d72bf 100644
--- a/target/product/module_x86_64only.mk
+++ b/target/product/module_x86_64only.mk
@@ -18,5 +18,4 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit_only.mk)
 
 PRODUCT_NAME := module_x86_64only
-PRODUCT_BRAND := Android
 PRODUCT_DEVICE := module_x86_64only
diff --git a/target/product/sdk_phone_arm64.mk b/target/product/sdk_phone_arm64.mk
index 4203d45..3f81615 100644
--- a/target/product/sdk_phone_arm64.mk
+++ b/target/product/sdk_phone_arm64.mk
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-QEMU_USE_SYSTEM_EXT_PARTITIONS := true
 PRODUCT_USE_DYNAMIC_PARTITIONS := true
 
 # This is a build configuration for a full-featured build of the
diff --git a/target/product/sdk_phone_armv7.mk b/target/product/sdk_phone_armv7.mk
index 888505b..48a0e3b 100644
--- a/target/product/sdk_phone_armv7.mk
+++ b/target/product/sdk_phone_armv7.mk
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-QEMU_USE_SYSTEM_EXT_PARTITIONS := true
 PRODUCT_USE_DYNAMIC_PARTITIONS := true
 
 # This is a build configuration for a full-featured build of the
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index a324e5f..0f8b508 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-QEMU_USE_SYSTEM_EXT_PARTITIONS := true
 PRODUCT_USE_DYNAMIC_PARTITIONS := true
 
 # This is a build configuration for a full-featured build of the
diff --git a/target/product/sdk_phone_x86_64.mk b/target/product/sdk_phone_x86_64.mk
index ff9018d..f5d9028 100644
--- a/target/product/sdk_phone_x86_64.mk
+++ b/target/product/sdk_phone_x86_64.mk
@@ -13,7 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-QEMU_USE_SYSTEM_EXT_PARTITIONS := true
 PRODUCT_USE_DYNAMIC_PARTITIONS := true
 
 # This is a build configuration for a full-featured build of the
diff --git a/target/product/security/BUILD.bazel b/target/product/security/BUILD.bazel
new file mode 100644
index 0000000..c12be79
--- /dev/null
+++ b/target/product/security/BUILD.bazel
@@ -0,0 +1,8 @@
+filegroup(
+    name = "android_certificate_directory",
+    srcs = glob([
+        "*.pk8",
+        "*.pem",
+    ]),
+    visibility = ["//visibility:public"],
+)
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
index 418aaa4..f862485 100644
--- a/target/product/virtual_ab_ota/android_t_baseline.mk
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -20,3 +20,5 @@
 #
 # All U+ launching devices should instead use vabc_features.mk.
 $(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/vabc_features.mk)
+
+PRODUCT_VIRTUAL_AB_COW_VERSION := 2
diff --git a/target/product/window_extensions.mk b/target/product/window_extensions.mk
new file mode 100644
index 0000000..5f5431f
--- /dev/null
+++ b/target/product/window_extensions.mk
@@ -0,0 +1,24 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# /system_ext packages
+PRODUCT_PACKAGES += \
+    androidx.window.extensions \
+    androidx.window.sidecar
+
+# properties
+PRODUCT_PRODUCT_PROPERTIES += \
+    persist.wm.extensions.enabled=true
diff --git a/tests/b_tests.sh b/tests/b_tests.sh
index 13f156d..491d762 100755
--- a/tests/b_tests.sh
+++ b/tests/b_tests.sh
@@ -24,6 +24,7 @@
 test_target=//build/bazel/scripts/difftool:difftool
 
 b build "$test_target"
+b build -- "$test_target"
 b build "$test_target" --run-soong-tests
 b build --run-soong-tests "$test_target"
 b --run-soong-tests build "$test_target"
diff --git a/tests/lunch_tests.sh b/tests/lunch_tests.sh
index 4285d13..9b142ee 100755
--- a/tests/lunch_tests.sh
+++ b/tests/lunch_tests.sh
@@ -28,7 +28,7 @@
     [ "$TARGET_PLATFORM_VERSION" = "$4" ] || ( echo "lunch $1: expected TARGET_PLATFORM_VERSION='$4', got '$TARGET_PLATFORM_VERSION'" && exit 1 )
 )
 
-default_version=$(get_build_var DEFAULT_PLATFORM_VERSION)
+default_version=$(get_build_var RELEASE_PLATFORM_VERSION)
 
 # lunch tests
 check_lunch "aosp_arm64"                                "aosp_arm64" "eng"       ""
diff --git a/tests/product.rbc b/tests/product.rbc
index 9ae6393..b4c6d45 100644
--- a/tests/product.rbc
+++ b/tests/product.rbc
@@ -54,6 +54,7 @@
   rblf.soong_config_append(g, "NS1", "v2", "def")
   rblf.soong_config_set(g, "NS2", "v3", "abc")
   rblf.soong_config_set(g, "NS2", "v3", "xyz")
+  rblf.soong_config_set(g, "NS2", "v4", "xyz   ")
 
   rblf.mkdist_for_goals(g, "goal", "dir1/file1:out1 dir1/file2:out2")
   rblf.mkdist_for_goals(g, "goal", "dir2/file2:")
diff --git a/tests/run.rbc b/tests/run.rbc
index 33583eb..85d6c09 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -144,7 +144,8 @@
             "v2": "def"
         },
         "NS2": {
-            "v3": "xyz"
+            "v3": "xyz",
+            "v4": "xyz"
         }
     },
     {k:v for k, v in sorted(ns.items()) }
diff --git a/tools/Android.bp b/tools/Android.bp
index f446973..bea0602 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -69,3 +69,16 @@
   name: "generate_gts_shared_report",
   srcs: ["generate_gts_shared_report.py"],
 }
+
+python_binary_host {
+    name: "list_files",
+    main: "list_files.py",
+    srcs: [
+        "list_files.py",
+    ],
+    version: {
+      py3: {
+        embedded_launcher: true,
+      }
+    }
+}
diff --git a/tools/aconfig/.gitignore b/tools/aconfig/.gitignore
new file mode 100644
index 0000000..1b72444
--- /dev/null
+++ b/tools/aconfig/.gitignore
@@ -0,0 +1,2 @@
+/Cargo.lock
+/target
diff --git a/tools/aconfig/Android.bp b/tools/aconfig/Android.bp
new file mode 100644
index 0000000..25424c5
--- /dev/null
+++ b/tools/aconfig/Android.bp
@@ -0,0 +1,87 @@
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+// host binary: aconfig
+
+rust_protobuf_host {
+    name: "libaconfig_protos",
+    protos: ["protos/aconfig.proto"],
+    crate_name: "aconfig_protos",
+    source_stem: "aconfig_protos",
+    use_protobuf3: true,
+}
+
+rust_defaults {
+    name: "aconfig.defaults",
+    edition: "2021",
+    clippy_lints: "android",
+    lints: "android",
+    srcs: ["src/main.rs"],
+    rustlibs: [
+        "libaconfig_protos",
+        "libanyhow",
+        "libclap",
+        "libprotobuf",
+        "libserde",
+        "libserde_json",
+        "libtinytemplate",
+    ],
+}
+
+rust_binary_host {
+    name: "aconfig",
+    defaults: ["aconfig.defaults"],
+}
+
+rust_test_host {
+    name: "aconfig.test",
+    defaults: ["aconfig.defaults"],
+    rustlibs: [
+        "libitertools",
+    ],
+}
+
+// integration tests: java
+
+device_config_definitions {
+    name: "aconfig.test.flags",
+    namespace: "com.android.aconfig.test",
+    srcs: ["tests/test.aconfig"],
+}
+
+device_config_values {
+    name: "aconfig.test.flag.values",
+    namespace: "com.android.aconfig.test",
+    srcs: [
+        "tests/first.values",
+        "tests/second.values",
+    ],
+}
+
+device_config_value_set {
+    name: "aconfig.test.flag.value_set",
+    values: [
+        "aconfig.test.flag.values",
+    ],
+}
+
+java_device_config_definitions_library {
+    name: "aconfig_test_java",
+    device_config_definitions: "aconfig.test.flags",
+}
+
+android_test {
+    name: "aconfig.test.java",
+    srcs: [
+        "tests/**/*.java",
+    ],
+    manifest: "tests/AndroidManifest.xml",
+    certificate: "platform",
+    static_libs: [
+        "androidx.test.rules",
+        "testng",
+        "aconfig_test_java",
+    ],
+    test_suites: ["device-tests"],
+}
diff --git a/tools/aconfig/Cargo.toml b/tools/aconfig/Cargo.toml
new file mode 100644
index 0000000..b3c73b8
--- /dev/null
+++ b/tools/aconfig/Cargo.toml
@@ -0,0 +1,23 @@
+[package]
+name = "aconfig"
+version = "0.1.0"
+edition = "2021"
+build = "build.rs"
+
+[features]
+default = ["cargo"]
+cargo = []
+
+[dependencies]
+anyhow = "1.0.69"
+clap = { version = "4.1.8", features = ["derive"] }
+protobuf = "3.2.0"
+serde = { version = "1.0.152", features = ["derive"] }
+serde_json = "1.0.93"
+tinytemplate = "1.2.1"
+
+[build-dependencies]
+protobuf-codegen = "3.2.0"
+
+[dev-dependencies]
+itertools = "0.10.5"
diff --git a/tools/aconfig/MODULE_LICENSE_APACHE2 b/tools/aconfig/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/aconfig/MODULE_LICENSE_APACHE2
diff --git a/tools/aconfig/OWNERS b/tools/aconfig/OWNERS
new file mode 100644
index 0000000..4e05b00
--- /dev/null
+++ b/tools/aconfig/OWNERS
@@ -0,0 +1,5 @@
+amhk@google.com
+jham@google.com
+joeo@google.com
+opg@google.com
+zhidou@google.com
diff --git a/tools/aconfig/PREUPLOAD.cfg b/tools/aconfig/PREUPLOAD.cfg
new file mode 100644
index 0000000..75ed57c
--- /dev/null
+++ b/tools/aconfig/PREUPLOAD.cfg
@@ -0,0 +1,5 @@
+[Builtin Hooks]
+rustfmt = true
+
+[Builtin Hooks Options]
+rustfmt = --config-path=rustfmt.toml
diff --git a/tools/aconfig/build.rs b/tools/aconfig/build.rs
new file mode 100644
index 0000000..5ef5b60
--- /dev/null
+++ b/tools/aconfig/build.rs
@@ -0,0 +1,17 @@
+use protobuf_codegen::Codegen;
+
+fn main() {
+    let proto_files = vec!["protos/aconfig.proto"];
+
+    // tell cargo to only re-run the build script if any of the proto files has changed
+    for path in &proto_files {
+        println!("cargo:rerun-if-changed={}", path);
+    }
+
+    Codegen::new()
+        .pure()
+        .include("protos")
+        .inputs(proto_files)
+        .cargo_out_dir("aconfig_proto")
+        .run_from_script();
+}
diff --git a/tools/aconfig/protos/aconfig.proto b/tools/aconfig/protos/aconfig.proto
new file mode 100644
index 0000000..9f6424f
--- /dev/null
+++ b/tools/aconfig/protos/aconfig.proto
@@ -0,0 +1,80 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This is the schema definition for aconfig files. Modifications need to be
+// either backwards compatible, or include updates to all aconfig files in the
+// Android tree.
+
+syntax = "proto2";
+
+package android.aconfig;
+
+// messages used in both aconfig input and output
+
+enum flag_state {
+  ENABLED = 1;
+  DISABLED = 2;
+}
+
+enum flag_permission {
+  READ_ONLY = 1;
+  READ_WRITE = 2;
+}
+
+// aconfig input messages: flag declarations and values
+
+message flag_declaration {
+  required string name = 1;
+  required string namespace = 2;
+  required string description = 3;
+};
+
+message flag_declarations {
+  required string package = 1;
+  repeated flag_declaration flag = 2;
+};
+
+message flag_value {
+  required string package = 1;
+  required string name = 2;
+  required flag_state state = 3;
+  required flag_permission permission = 4;
+};
+
+message flag_values {
+  repeated flag_value flag_value = 1;
+};
+
+// aconfig output messages: parsed and verified flag declarations and values
+
+message tracepoint {
+  // path to declaration or value file relative to $TOP
+  required string source = 1;
+  required flag_state state = 2;
+  required flag_permission permission = 3;
+}
+
+message parsed_flag {
+  required string package = 1;
+  required string name = 2;
+  required string namespace = 3;
+  required string description = 4;
+  required flag_state state = 5;
+  required flag_permission permission = 6;
+  repeated tracepoint trace = 7;
+}
+
+message parsed_flags {
+  repeated parsed_flag parsed_flag = 1;
+}
diff --git a/tools/aconfig/rustfmt.toml b/tools/aconfig/rustfmt.toml
new file mode 120000
index 0000000..291e99b
--- /dev/null
+++ b/tools/aconfig/rustfmt.toml
@@ -0,0 +1 @@
+../../../soong/scripts/rustfmt.toml
\ No newline at end of file
diff --git a/tools/aconfig/src/aconfig.rs b/tools/aconfig/src/aconfig.rs
new file mode 100644
index 0000000..5e7c861
--- /dev/null
+++ b/tools/aconfig/src/aconfig.rs
@@ -0,0 +1,306 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::{anyhow, bail, Context, Error, Result};
+use protobuf::{Enum, EnumOrUnknown};
+use serde::{Deserialize, Serialize};
+
+use crate::cache::{Cache, Item, Tracepoint};
+use crate::protos::{
+    ProtoFlagDeclaration, ProtoFlagDeclarations, ProtoFlagPermission, ProtoFlagState,
+    ProtoFlagValue, ProtoFlagValues, ProtoParsedFlag, ProtoParsedFlags, ProtoTracepoint,
+};
+
+#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Copy)]
+pub enum FlagState {
+    Enabled,
+    Disabled,
+}
+
+impl TryFrom<EnumOrUnknown<ProtoFlagState>> for FlagState {
+    type Error = Error;
+
+    fn try_from(proto: EnumOrUnknown<ProtoFlagState>) -> Result<Self, Self::Error> {
+        match ProtoFlagState::from_i32(proto.value()) {
+            Some(ProtoFlagState::ENABLED) => Ok(FlagState::Enabled),
+            Some(ProtoFlagState::DISABLED) => Ok(FlagState::Disabled),
+            None => Err(anyhow!("unknown flag state enum value {}", proto.value())),
+        }
+    }
+}
+
+impl From<FlagState> for ProtoFlagState {
+    fn from(state: FlagState) -> Self {
+        match state {
+            FlagState::Enabled => ProtoFlagState::ENABLED,
+            FlagState::Disabled => ProtoFlagState::DISABLED,
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Copy)]
+pub enum Permission {
+    ReadOnly,
+    ReadWrite,
+}
+
+impl TryFrom<EnumOrUnknown<ProtoFlagPermission>> for Permission {
+    type Error = Error;
+
+    fn try_from(proto: EnumOrUnknown<ProtoFlagPermission>) -> Result<Self, Self::Error> {
+        match ProtoFlagPermission::from_i32(proto.value()) {
+            Some(ProtoFlagPermission::READ_ONLY) => Ok(Permission::ReadOnly),
+            Some(ProtoFlagPermission::READ_WRITE) => Ok(Permission::ReadWrite),
+            None => Err(anyhow!("unknown permission enum value {}", proto.value())),
+        }
+    }
+}
+
+impl From<Permission> for ProtoFlagPermission {
+    fn from(permission: Permission) -> Self {
+        match permission {
+            Permission::ReadOnly => ProtoFlagPermission::READ_ONLY,
+            Permission::ReadWrite => ProtoFlagPermission::READ_WRITE,
+        }
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct FlagDeclaration {
+    pub name: String,
+    pub namespace: String,
+    pub description: String,
+}
+
+impl FlagDeclaration {
+    #[allow(dead_code)] // only used in unit tests
+    pub fn try_from_text_proto(text_proto: &str) -> Result<FlagDeclaration> {
+        let proto: ProtoFlagDeclaration = crate::protos::try_from_text_proto(text_proto)
+            .with_context(|| text_proto.to_owned())?;
+        proto.try_into()
+    }
+}
+
+impl TryFrom<ProtoFlagDeclaration> for FlagDeclaration {
+    type Error = Error;
+
+    fn try_from(proto: ProtoFlagDeclaration) -> Result<Self, Self::Error> {
+        let Some(name) = proto.name else {
+            bail!("missing 'name' field");
+        };
+        let Some(namespace) = proto.namespace else {
+            bail!("missing 'namespace' field");
+        };
+        let Some(description) = proto.description else {
+            bail!("missing 'description' field");
+        };
+        Ok(FlagDeclaration { name, namespace, description })
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct FlagDeclarations {
+    pub package: String,
+    pub flags: Vec<FlagDeclaration>,
+}
+
+impl FlagDeclarations {
+    pub fn try_from_text_proto(text_proto: &str) -> Result<FlagDeclarations> {
+        let proto: ProtoFlagDeclarations = crate::protos::try_from_text_proto(text_proto)
+            .with_context(|| text_proto.to_owned())?;
+        let Some(package) = proto.package else {
+            bail!("missing 'package' field");
+        };
+        let mut flags = vec![];
+        for proto_flag in proto.flag.into_iter() {
+            flags.push(proto_flag.try_into()?);
+        }
+        Ok(FlagDeclarations { package, flags })
+    }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct FlagValue {
+    pub package: String,
+    pub name: String,
+    pub state: FlagState,
+    pub permission: Permission,
+}
+
+impl FlagValue {
+    #[allow(dead_code)] // only used in unit tests
+    pub fn try_from_text_proto(text_proto: &str) -> Result<FlagValue> {
+        let proto: ProtoFlagValue = crate::protos::try_from_text_proto(text_proto)?;
+        proto.try_into()
+    }
+
+    pub fn try_from_text_proto_list(text_proto: &str) -> Result<Vec<FlagValue>> {
+        let proto: ProtoFlagValues = crate::protos::try_from_text_proto(text_proto)?;
+        proto.flag_value.into_iter().map(|proto_flag| proto_flag.try_into()).collect()
+    }
+}
+
+impl TryFrom<ProtoFlagValue> for FlagValue {
+    type Error = Error;
+
+    fn try_from(proto: ProtoFlagValue) -> Result<Self, Self::Error> {
+        let Some(package) = proto.package else {
+            bail!("missing 'package' field");
+        };
+        let Some(name) = proto.name else {
+            bail!("missing 'name' field");
+        };
+        let Some(proto_state) = proto.state else {
+            bail!("missing 'state' field");
+        };
+        let state = proto_state.try_into()?;
+        let Some(proto_permission) = proto.permission else {
+            bail!("missing 'permission' field");
+        };
+        let permission = proto_permission.try_into()?;
+        Ok(FlagValue { package, name, state, permission })
+    }
+}
+
+impl From<Cache> for ProtoParsedFlags {
+    fn from(cache: Cache) -> Self {
+        let mut proto = ProtoParsedFlags::new();
+        for item in cache.into_iter() {
+            proto.parsed_flag.push(item.into());
+        }
+        proto
+    }
+}
+
+impl From<Item> for ProtoParsedFlag {
+    fn from(item: Item) -> Self {
+        let mut proto = crate::protos::ProtoParsedFlag::new();
+        proto.set_package(item.package.to_owned());
+        proto.set_name(item.name.clone());
+        proto.set_namespace(item.namespace.clone());
+        proto.set_description(item.description.clone());
+        proto.set_state(item.state.into());
+        proto.set_permission(item.permission.into());
+        for trace in item.trace.into_iter() {
+            proto.trace.push(trace.into());
+        }
+        proto
+    }
+}
+
+impl From<Tracepoint> for ProtoTracepoint {
+    fn from(tracepoint: Tracepoint) -> Self {
+        let mut proto = ProtoTracepoint::new();
+        proto.set_source(format!("{}", tracepoint.source));
+        proto.set_state(tracepoint.state.into());
+        proto.set_permission(tracepoint.permission.into());
+        proto
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_flag_try_from_text_proto() {
+        let expected = FlagDeclaration {
+            name: "1234".to_owned(),
+            namespace: "ns".to_owned(),
+            description: "Description of the flag".to_owned(),
+        };
+
+        let s = r#"
+        name: "1234"
+        namespace: "ns"
+        description: "Description of the flag"
+        "#;
+        let actual = FlagDeclaration::try_from_text_proto(s).unwrap();
+
+        assert_eq!(expected, actual);
+    }
+
+    #[test]
+    fn test_flag_try_from_text_proto_bad_input() {
+        let s = r#"
+        name: "a"
+        "#;
+        let error = FlagDeclaration::try_from_text_proto(s).unwrap_err();
+        assert!(format!("{:?}", error).contains("Message not initialized"));
+
+        let s = r#"
+        description: "Description of the flag"
+        "#;
+        let error = FlagDeclaration::try_from_text_proto(s).unwrap_err();
+        assert!(format!("{:?}", error).contains("Message not initialized"));
+    }
+
+    #[test]
+    fn test_package_try_from_text_proto() {
+        let expected = FlagDeclarations {
+            package: "com.example".to_owned(),
+            flags: vec![
+                FlagDeclaration {
+                    name: "a".to_owned(),
+                    namespace: "ns".to_owned(),
+                    description: "A".to_owned(),
+                },
+                FlagDeclaration {
+                    name: "b".to_owned(),
+                    namespace: "ns".to_owned(),
+                    description: "B".to_owned(),
+                },
+            ],
+        };
+
+        let s = r#"
+        package: "com.example"
+        flag {
+            name: "a"
+            namespace: "ns"
+            description: "A"
+        }
+        flag {
+            name: "b"
+            namespace: "ns"
+            description: "B"
+        }
+        "#;
+        let actual = FlagDeclarations::try_from_text_proto(s).unwrap();
+
+        assert_eq!(expected, actual);
+    }
+
+    #[test]
+    fn test_flag_declaration_try_from_text_proto_list() {
+        let expected = FlagValue {
+            package: "com.example".to_owned(),
+            name: "1234".to_owned(),
+            state: FlagState::Enabled,
+            permission: Permission::ReadOnly,
+        };
+
+        let s = r#"
+        package: "com.example"
+        name: "1234"
+        state: ENABLED
+        permission: READ_ONLY
+        "#;
+        let actual = FlagValue::try_from_text_proto(s).unwrap();
+
+        assert_eq!(expected, actual);
+    }
+}
diff --git a/tools/aconfig/src/cache.rs b/tools/aconfig/src/cache.rs
new file mode 100644
index 0000000..dd54480
--- /dev/null
+++ b/tools/aconfig/src/cache.rs
@@ -0,0 +1,388 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::{bail, ensure, Result};
+use serde::{Deserialize, Serialize};
+use std::io::{Read, Write};
+
+use crate::aconfig::{FlagDeclaration, FlagState, FlagValue, Permission};
+use crate::codegen;
+use crate::commands::Source;
+
+const DEFAULT_FLAG_STATE: FlagState = FlagState::Disabled;
+const DEFAULT_FLAG_PERMISSION: Permission = Permission::ReadWrite;
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct Tracepoint {
+    pub source: Source,
+    pub state: FlagState,
+    pub permission: Permission,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct Item {
+    // TODO: duplicating the Cache.package as Item.package makes the internal representation
+    // closer to the proto message `parsed_flag`; hopefully this will enable us to replace the Item
+    // struct and use a newtype instead once aconfig has matured. Until then, package should
+    // really be a Cow<String>.
+    pub package: String,
+    pub name: String,
+    pub namespace: String,
+    pub description: String,
+    pub state: FlagState,
+    pub permission: Permission,
+    pub trace: Vec<Tracepoint>,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct Cache {
+    package: String,
+    items: Vec<Item>,
+}
+
+// TODO: replace this function with Iterator.is_sorted_by_key(...)) when that API becomes stable
+fn iter_is_sorted_by_key<'a, T: 'a, F, K>(iter: impl Iterator<Item = &'a T>, f: F) -> bool
+where
+    F: FnMut(&'a T) -> K,
+    K: PartialOrd<K>,
+{
+    let mut last: Option<K> = None;
+    for current in iter.map(f) {
+        if let Some(l) = last {
+            if l > current {
+                return false;
+            }
+        }
+        last = Some(current);
+    }
+    true
+}
+
+impl Cache {
+    pub fn read_from_reader(reader: impl Read) -> Result<Cache> {
+        let cache: Cache = serde_json::from_reader(reader)?;
+        ensure!(
+            iter_is_sorted_by_key(cache.iter(), |item| &item.name),
+            "internal error: flags in cache file not sorted"
+        );
+        Ok(cache)
+    }
+
+    pub fn write_to_writer(&self, writer: impl Write) -> Result<()> {
+        ensure!(
+            iter_is_sorted_by_key(self.iter(), |item| &item.name),
+            "internal error: flags in cache file not sorted"
+        );
+        serde_json::to_writer(writer, self).map_err(|e| e.into())
+    }
+
+    pub fn iter(&self) -> impl Iterator<Item = &Item> {
+        self.items.iter()
+    }
+
+    pub fn into_iter(self) -> impl Iterator<Item = Item> {
+        self.items.into_iter()
+    }
+
+    pub fn package(&self) -> &str {
+        debug_assert!(!self.package.is_empty());
+        &self.package
+    }
+}
+
+#[derive(Debug)]
+pub struct CacheBuilder {
+    cache: Cache,
+}
+
+impl CacheBuilder {
+    pub fn new(package: String) -> Result<CacheBuilder> {
+        ensure!(codegen::is_valid_package_ident(&package), "bad package");
+        let cache = Cache { package, items: vec![] };
+        Ok(CacheBuilder { cache })
+    }
+
+    pub fn add_flag_declaration(
+        &mut self,
+        source: Source,
+        declaration: FlagDeclaration,
+    ) -> Result<&mut CacheBuilder> {
+        ensure!(codegen::is_valid_name_ident(&declaration.name), "bad flag name");
+        ensure!(codegen::is_valid_name_ident(&declaration.namespace), "bad namespace");
+        ensure!(!declaration.description.is_empty(), "empty flag description");
+        ensure!(
+            self.cache.items.iter().all(|item| item.name != declaration.name),
+            "failed to declare flag {} from {}: flag already declared",
+            declaration.name,
+            source
+        );
+        self.cache.items.push(Item {
+            package: self.cache.package.clone(),
+            name: declaration.name.clone(),
+            namespace: declaration.namespace.clone(),
+            description: declaration.description,
+            state: DEFAULT_FLAG_STATE,
+            permission: DEFAULT_FLAG_PERMISSION,
+            trace: vec![Tracepoint {
+                source,
+                state: DEFAULT_FLAG_STATE,
+                permission: DEFAULT_FLAG_PERMISSION,
+            }],
+        });
+        Ok(self)
+    }
+
+    pub fn add_flag_value(
+        &mut self,
+        source: Source,
+        value: FlagValue,
+    ) -> Result<&mut CacheBuilder> {
+        ensure!(codegen::is_valid_package_ident(&value.package), "bad flag package");
+        ensure!(codegen::is_valid_name_ident(&value.name), "bad flag name");
+        ensure!(
+            value.package == self.cache.package,
+            "failed to set values for flag {}/{} from {}: expected package {}",
+            value.package,
+            value.name,
+            source,
+            self.cache.package
+        );
+        let Some(existing_item) = self.cache.items.iter_mut().find(|item| item.name == value.name) else {
+            bail!("failed to set values for flag {}/{} from {}: flag not declared", value.package, value.name, source);
+        };
+        existing_item.state = value.state;
+        existing_item.permission = value.permission;
+        existing_item.trace.push(Tracepoint {
+            source,
+            state: value.state,
+            permission: value.permission,
+        });
+        Ok(self)
+    }
+
+    pub fn build(mut self) -> Cache {
+        self.cache.items.sort_by_cached_key(|item| item.name.clone());
+        self.cache
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_add_flag_declaration() {
+        let mut builder = CacheBuilder::new("com.example".to_string()).unwrap();
+        builder
+            .add_flag_declaration(
+                Source::File("first.txt".to_string()),
+                FlagDeclaration {
+                    name: "foo".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "desc".to_string(),
+                },
+            )
+            .unwrap();
+        let error = builder
+            .add_flag_declaration(
+                Source::File("second.txt".to_string()),
+                FlagDeclaration {
+                    name: "foo".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "desc".to_string(),
+                },
+            )
+            .unwrap_err();
+        assert_eq!(
+            &format!("{:?}", error),
+            "failed to declare flag foo from second.txt: flag already declared"
+        );
+        builder
+            .add_flag_declaration(
+                Source::File("first.txt".to_string()),
+                FlagDeclaration {
+                    name: "bar".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "desc".to_string(),
+                },
+            )
+            .unwrap();
+
+        let cache = builder.build();
+
+        // check flags are sorted by name
+        assert_eq!(
+            cache.into_iter().map(|item| item.name).collect::<Vec<_>>(),
+            vec!["bar".to_string(), "foo".to_string()]
+        );
+    }
+
+    #[test]
+    fn test_add_flag_value() {
+        let mut builder = CacheBuilder::new("com.example".to_string()).unwrap();
+        let error = builder
+            .add_flag_value(
+                Source::Memory,
+                FlagValue {
+                    package: "com.example".to_string(),
+                    name: "foo".to_string(),
+                    state: FlagState::Enabled,
+                    permission: Permission::ReadOnly,
+                },
+            )
+            .unwrap_err();
+        assert_eq!(
+            &format!("{:?}", error),
+            "failed to set values for flag com.example/foo from <memory>: flag not declared"
+        );
+
+        builder
+            .add_flag_declaration(
+                Source::File("first.txt".to_string()),
+                FlagDeclaration {
+                    name: "foo".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "desc".to_string(),
+                },
+            )
+            .unwrap();
+
+        builder
+            .add_flag_value(
+                Source::Memory,
+                FlagValue {
+                    package: "com.example".to_string(),
+                    name: "foo".to_string(),
+                    state: FlagState::Disabled,
+                    permission: Permission::ReadOnly,
+                },
+            )
+            .unwrap();
+
+        builder
+            .add_flag_value(
+                Source::Memory,
+                FlagValue {
+                    package: "com.example".to_string(),
+                    name: "foo".to_string(),
+                    state: FlagState::Enabled,
+                    permission: Permission::ReadWrite,
+                },
+            )
+            .unwrap();
+
+        // different package -> no-op
+        let error = builder
+            .add_flag_value(
+                Source::Memory,
+                FlagValue {
+                    package: "some_other_package".to_string(),
+                    name: "foo".to_string(),
+                    state: FlagState::Enabled,
+                    permission: Permission::ReadOnly,
+                },
+            )
+            .unwrap_err();
+        assert_eq!(&format!("{:?}", error), "failed to set values for flag some_other_package/foo from <memory>: expected package com.example");
+
+        let cache = builder.build();
+        let item = cache.iter().find(|&item| item.name == "foo").unwrap();
+        assert_eq!(FlagState::Enabled, item.state);
+        assert_eq!(Permission::ReadWrite, item.permission);
+    }
+
+    #[test]
+    fn test_reject_empty_cache_package() {
+        CacheBuilder::new("".to_string()).unwrap_err();
+    }
+
+    #[test]
+    fn test_reject_empty_flag_declaration_fields() {
+        let mut builder = CacheBuilder::new("com.example".to_string()).unwrap();
+
+        let error = builder
+            .add_flag_declaration(
+                Source::Memory,
+                FlagDeclaration {
+                    name: "".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "Description".to_string(),
+                },
+            )
+            .unwrap_err();
+        assert_eq!(&format!("{:?}", error), "bad flag name");
+
+        let error = builder
+            .add_flag_declaration(
+                Source::Memory,
+                FlagDeclaration {
+                    name: "foo".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "".to_string(),
+                },
+            )
+            .unwrap_err();
+        assert_eq!(&format!("{:?}", error), "empty flag description");
+    }
+
+    #[test]
+    fn test_reject_empty_flag_value_files() {
+        let mut builder = CacheBuilder::new("com.example".to_string()).unwrap();
+        builder
+            .add_flag_declaration(
+                Source::Memory,
+                FlagDeclaration {
+                    name: "foo".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "desc".to_string(),
+                },
+            )
+            .unwrap();
+
+        let error = builder
+            .add_flag_value(
+                Source::Memory,
+                FlagValue {
+                    package: "".to_string(),
+                    name: "foo".to_string(),
+                    state: FlagState::Enabled,
+                    permission: Permission::ReadOnly,
+                },
+            )
+            .unwrap_err();
+        assert_eq!(&format!("{:?}", error), "bad flag package");
+
+        let error = builder
+            .add_flag_value(
+                Source::Memory,
+                FlagValue {
+                    package: "com.example".to_string(),
+                    name: "".to_string(),
+                    state: FlagState::Enabled,
+                    permission: Permission::ReadOnly,
+                },
+            )
+            .unwrap_err();
+        assert_eq!(&format!("{:?}", error), "bad flag name");
+    }
+
+    #[test]
+    fn test_iter_is_sorted_by_key() {
+        assert!(iter_is_sorted_by_key(["a", "b", "c"].iter(), |s| s));
+        assert!(iter_is_sorted_by_key(Vec::<&str>::new().iter(), |s| s));
+        assert!(!iter_is_sorted_by_key(["a", "c", "b"].iter(), |s| s));
+    }
+}
diff --git a/tools/aconfig/src/codegen.rs b/tools/aconfig/src/codegen.rs
new file mode 100644
index 0000000..fea9961
--- /dev/null
+++ b/tools/aconfig/src/codegen.rs
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::{ensure, Result};
+
+pub fn is_valid_name_ident(s: &str) -> bool {
+    // Identifiers must match [a-z][a-z0-9_]*
+    let mut chars = s.chars();
+    let Some(first) = chars.next() else {
+        return false;
+    };
+    if !first.is_ascii_lowercase() {
+        return false;
+    }
+    chars.all(|ch| ch.is_ascii_lowercase() || ch.is_ascii_digit() || ch == '_')
+}
+
+pub fn is_valid_package_ident(s: &str) -> bool {
+    s.split('.').all(is_valid_name_ident)
+}
+
+pub fn create_device_config_ident(package: &str, flag_name: &str) -> Result<String> {
+    ensure!(is_valid_package_ident(package), "bad package");
+    ensure!(is_valid_package_ident(flag_name), "bad flag name");
+    Ok(format!("{}.{}", package, flag_name))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_is_valid_name_ident() {
+        assert!(is_valid_name_ident("foo"));
+        assert!(is_valid_name_ident("foo_bar_123"));
+
+        assert!(!is_valid_name_ident(""));
+        assert!(!is_valid_name_ident("123_foo"));
+        assert!(!is_valid_name_ident("foo-bar"));
+        assert!(!is_valid_name_ident("foo-b\u{00e5}r"));
+    }
+
+    #[test]
+    fn test_is_valid_package_ident() {
+        assert!(is_valid_package_ident("foo"));
+        assert!(is_valid_package_ident("foo_bar_123"));
+        assert!(is_valid_package_ident("foo.bar"));
+        assert!(is_valid_package_ident("foo.bar.a123"));
+
+        assert!(!is_valid_package_ident(""));
+        assert!(!is_valid_package_ident("123_foo"));
+        assert!(!is_valid_package_ident("foo-bar"));
+        assert!(!is_valid_package_ident("foo-b\u{00e5}r"));
+        assert!(!is_valid_package_ident("foo.bar.123"));
+        assert!(!is_valid_package_ident(".foo.bar"));
+        assert!(!is_valid_package_ident("foo.bar."));
+        assert!(!is_valid_package_ident("."));
+        assert!(!is_valid_package_ident("foo..bar"));
+    }
+
+    #[test]
+    fn test_create_device_config_ident() {
+        assert_eq!(
+            "com.foo.bar.some_flag",
+            create_device_config_ident("com.foo.bar", "some_flag").unwrap()
+        );
+    }
+}
diff --git a/tools/aconfig/src/codegen_cpp.rs b/tools/aconfig/src/codegen_cpp.rs
new file mode 100644
index 0000000..37b058d
--- /dev/null
+++ b/tools/aconfig/src/codegen_cpp.rs
@@ -0,0 +1,225 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::{ensure, Result};
+use serde::Serialize;
+use tinytemplate::TinyTemplate;
+
+use crate::aconfig::{FlagState, Permission};
+use crate::cache::{Cache, Item};
+use crate::codegen;
+use crate::commands::OutputFile;
+
+pub fn generate_cpp_code(cache: &Cache) -> Result<OutputFile> {
+    let package = cache.package();
+    let class_elements: Vec<ClassElement> =
+        cache.iter().map(|item| create_class_element(package, item)).collect();
+    let readwrite = class_elements.iter().any(|item| item.readwrite);
+    let header = package.replace('.', "_");
+    let cpp_namespace = package.replace('.', "::");
+    ensure!(codegen::is_valid_name_ident(&header));
+    let context = Context {
+        header: header.clone(),
+        cpp_namespace,
+        package: package.to_string(),
+        readwrite,
+        class_elements,
+    };
+    let mut template = TinyTemplate::new();
+    template.add_template("cpp_code_gen", include_str!("../templates/cpp.template"))?;
+    let contents = template.render("cpp_code_gen", &context)?;
+    let path = ["aconfig", &(header + ".h")].iter().collect();
+    Ok(OutputFile { contents: contents.into(), path })
+}
+
+#[derive(Serialize)]
+struct Context {
+    pub header: String,
+    pub cpp_namespace: String,
+    pub package: String,
+    pub readwrite: bool,
+    pub class_elements: Vec<ClassElement>,
+}
+
+#[derive(Serialize)]
+struct ClassElement {
+    pub readwrite: bool,
+    pub default_value: String,
+    pub flag_name: String,
+    pub device_config_namespace: String,
+    pub device_config_flag: String,
+}
+
+fn create_class_element(package: &str, item: &Item) -> ClassElement {
+    ClassElement {
+        readwrite: item.permission == Permission::ReadWrite,
+        default_value: if item.state == FlagState::Enabled {
+            "true".to_string()
+        } else {
+            "false".to_string()
+        },
+        flag_name: item.name.clone(),
+        device_config_namespace: item.namespace.to_string(),
+        device_config_flag: codegen::create_device_config_ident(package, &item.name)
+            .expect("values checked at cache creation time"),
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::aconfig::{FlagDeclaration, FlagState, FlagValue, Permission};
+    use crate::cache::CacheBuilder;
+    use crate::commands::Source;
+
+    #[test]
+    fn test_cpp_codegen_build_time_flag_only() {
+        let package = "com.example";
+        let mut builder = CacheBuilder::new(package.to_string()).unwrap();
+        builder
+            .add_flag_declaration(
+                Source::File("aconfig_one.txt".to_string()),
+                FlagDeclaration {
+                    name: "my_flag_one".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "buildtime disable".to_string(),
+                },
+            )
+            .unwrap()
+            .add_flag_value(
+                Source::Memory,
+                FlagValue {
+                    package: package.to_string(),
+                    name: "my_flag_one".to_string(),
+                    state: FlagState::Disabled,
+                    permission: Permission::ReadOnly,
+                },
+            )
+            .unwrap()
+            .add_flag_declaration(
+                Source::File("aconfig_two.txt".to_string()),
+                FlagDeclaration {
+                    name: "my_flag_two".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "buildtime enable".to_string(),
+                },
+            )
+            .unwrap()
+            .add_flag_value(
+                Source::Memory,
+                FlagValue {
+                    package: package.to_string(),
+                    name: "my_flag_two".to_string(),
+                    state: FlagState::Enabled,
+                    permission: Permission::ReadOnly,
+                },
+            )
+            .unwrap();
+        let cache = builder.build();
+        let expect_content = r#"#ifndef com_example_HEADER_H
+        #define com_example_HEADER_H
+
+        namespace com::example {
+
+            static const bool my_flag_one() {
+                return false;
+            }
+
+            static const bool my_flag_two() {
+                return true;
+            }
+
+        }
+        #endif
+        "#;
+        let file = generate_cpp_code(&cache).unwrap();
+        assert_eq!("aconfig/com_example.h", file.path.to_str().unwrap());
+        assert_eq!(
+            expect_content.replace(' ', ""),
+            String::from_utf8(file.contents).unwrap().replace(' ', "")
+        );
+    }
+
+    #[test]
+    fn test_cpp_codegen_runtime_flag() {
+        let package = "com.example";
+        let mut builder = CacheBuilder::new(package.to_string()).unwrap();
+        builder
+            .add_flag_declaration(
+                Source::File("aconfig_one.txt".to_string()),
+                FlagDeclaration {
+                    name: "my_flag_one".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "buildtime disable".to_string(),
+                },
+            )
+            .unwrap()
+            .add_flag_declaration(
+                Source::File("aconfig_two.txt".to_string()),
+                FlagDeclaration {
+                    name: "my_flag_two".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "runtime enable".to_string(),
+                },
+            )
+            .unwrap()
+            .add_flag_value(
+                Source::Memory,
+                FlagValue {
+                    package: package.to_string(),
+                    name: "my_flag_two".to_string(),
+                    state: FlagState::Enabled,
+                    permission: Permission::ReadWrite,
+                },
+            )
+            .unwrap();
+        let cache = builder.build();
+        let expect_content = r#"#ifndef com_example_HEADER_H
+        #define com_example_HEADER_H
+
+        #include <server_configurable_flags/get_flags.h>
+        using namespace server_configurable_flags;
+
+        namespace com::example {
+
+            static const bool my_flag_one() {
+                return GetServerConfigurableFlag(
+                    "ns",
+                    "com.example.my_flag_one",
+                    "false") == "true";
+            }
+
+            static const bool my_flag_two() {
+                return GetServerConfigurableFlag(
+                    "ns",
+                    "com.example.my_flag_two",
+                    "true") == "true";
+            }
+
+        }
+        #endif
+        "#;
+        let file = generate_cpp_code(&cache).unwrap();
+        assert_eq!("aconfig/com_example.h", file.path.to_str().unwrap());
+        assert_eq!(
+            None,
+            crate::test::first_significant_code_diff(
+                expect_content,
+                &String::from_utf8(file.contents).unwrap()
+            )
+        );
+    }
+}
diff --git a/tools/aconfig/src/codegen_java.rs b/tools/aconfig/src/codegen_java.rs
new file mode 100644
index 0000000..cf025cb
--- /dev/null
+++ b/tools/aconfig/src/codegen_java.rs
@@ -0,0 +1,145 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::Result;
+use serde::Serialize;
+use std::path::PathBuf;
+use tinytemplate::TinyTemplate;
+
+use crate::aconfig::{FlagState, Permission};
+use crate::cache::{Cache, Item};
+use crate::codegen;
+use crate::commands::OutputFile;
+
+/// Generates a single Java `Flags` class for every flag in `cache`.
+///
+/// The output path mirrors the cache's package: dots become directory
+/// separators, e.g. package `com.example` -> `com/example/Flags.java`.
+pub fn generate_java_code(cache: &Cache) -> Result<OutputFile> {
+    let package = cache.package();
+    let class_elements: Vec<ClassElement> =
+        cache.iter().map(|item| create_class_element(package, item)).collect();
+    // True if at least one flag is read-write; exposed to the template so it
+    // can special-case server-configurable flags (see java.template).
+    let readwrite = class_elements.iter().any(|item| item.readwrite);
+    let context = Context { package: package.to_string(), readwrite, class_elements };
+    let mut template = TinyTemplate::new();
+    template.add_template("java_code_gen", include_str!("../templates/java.template"))?;
+    let contents = template.render("java_code_gen", &context)?;
+    let mut path: PathBuf = package.split('.').collect();
+    // TODO: Allow customization of the java class name
+    path.push("Flags.java");
+    Ok(OutputFile { contents: contents.into(), path })
+}
+
+/// Top-level context handed to `java.template` when rendering.
+#[derive(Serialize)]
+struct Context {
+    pub package: String,
+    pub readwrite: bool,
+    pub class_elements: Vec<ClassElement>,
+}
+
+/// Per-flag data rendered as one static method in the generated Java class.
+#[derive(Serialize)]
+struct ClassElement {
+    pub method_name: String,
+    pub readwrite: bool,
+    pub default_value: String,
+    pub device_config_namespace: String,
+    pub device_config_flag: String,
+}
+
+/// Maps one cached flag `item` to the template data for its Java accessor.
+fn create_class_element(package: &str, item: &Item) -> ClassElement {
+    // Identifier validity was already enforced when the cache was built, so a
+    // failure here is a programming error, not bad user input.
+    let device_config_flag = codegen::create_device_config_ident(package, &item.name)
+        .expect("values checked at cache creation time");
+    ClassElement {
+        // '-' is legal in flag names but not in Java method names.
+        method_name: item.name.replace('-', "_"),
+        readwrite: item.permission == Permission::ReadWrite,
+        default_value: if item.state == FlagState::Enabled {
+            "true".to_string()
+        } else {
+            "false".to_string()
+        },
+        device_config_namespace: item.namespace.clone(),
+        device_config_flag,
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::aconfig::{FlagDeclaration, FlagValue};
+    use crate::cache::CacheBuilder;
+    use crate::commands::Source;
+
+    // End-to-end check of the Java template: one read-only flag ("test",
+    // forced Disabled/ReadOnly by a flag value) and one flag left at its
+    // defaults ("test2", rendered via DeviceConfig.getBoolean). The
+    // comparison ignores insignificant whitespace differences.
+    #[test]
+    fn test_generate_java_code() {
+        let package = "com.example";
+        let mut builder = CacheBuilder::new(package.to_string()).unwrap();
+        builder
+            .add_flag_declaration(
+                Source::File("test.txt".to_string()),
+                FlagDeclaration {
+                    name: "test".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "buildtime enable".to_string(),
+                },
+            )
+            .unwrap()
+            .add_flag_declaration(
+                Source::File("test2.txt".to_string()),
+                FlagDeclaration {
+                    name: "test2".to_string(),
+                    namespace: "ns".to_string(),
+                    description: "runtime disable".to_string(),
+                },
+            )
+            .unwrap()
+            .add_flag_value(
+                Source::Memory,
+                FlagValue {
+                    package: package.to_string(),
+                    name: "test".to_string(),
+                    state: FlagState::Disabled,
+                    permission: Permission::ReadOnly,
+                },
+            )
+            .unwrap();
+        let cache = builder.build();
+        let expect_content = r#"package com.example;
+
+        import android.provider.DeviceConfig;
+
+        public final class Flags {
+
+            public static boolean test() {
+                return false;
+            }
+
+            public static boolean test2() {
+                return DeviceConfig.getBoolean(
+                    "ns",
+                    "com.example.test2",
+                    false
+                );
+            }
+
+        }
+        "#;
+        let file = generate_java_code(&cache).unwrap();
+        assert_eq!("com/example/Flags.java", file.path.to_str().unwrap());
+        assert_eq!(
+            None,
+            crate::test::first_significant_code_diff(
+                expect_content,
+                &String::from_utf8(file.contents).unwrap()
+            )
+        );
+    }
+}
diff --git a/tools/aconfig/src/codegen_rust.rs b/tools/aconfig/src/codegen_rust.rs
new file mode 100644
index 0000000..98caeae
--- /dev/null
+++ b/tools/aconfig/src/codegen_rust.rs
@@ -0,0 +1,137 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::Result;
+use serde::Serialize;
+use tinytemplate::TinyTemplate;
+
+use crate::aconfig::{FlagState, Permission};
+use crate::cache::{Cache, Item};
+use crate::codegen;
+use crate::commands::OutputFile;
+
+/// Generates a Rust library (`src/lib.rs`) exposing one accessor function per
+/// flag in `cache`, nested in modules derived from the package name
+/// (e.g. `com.android.x` -> `pub mod com { pub mod android { pub mod x ... }`).
+pub fn generate_rust_code(cache: &Cache) -> Result<OutputFile> {
+    let package = cache.package();
+    let parsed_flags: Vec<TemplateParsedFlag> =
+        cache.iter().map(|item| TemplateParsedFlag::new(package, item)).collect();
+    let context = TemplateContext {
+        package: package.to_string(),
+        parsed_flags,
+        // One nested `pub mod` per dot-separated package segment.
+        modules: package.split('.').map(|s| s.to_string()).collect::<Vec<_>>(),
+    };
+    let mut template = TinyTemplate::new();
+    template.add_template("rust_code_gen", include_str!("../templates/rust.template"))?;
+    let contents = template.render("rust_code_gen", &context)?;
+    let path = ["src", "lib.rs"].iter().collect();
+    Ok(OutputFile { contents: contents.into(), path })
+}
+
+/// Top-level context handed to `rust.template` when rendering.
+#[derive(Serialize)]
+struct TemplateContext {
+    pub package: String,
+    pub parsed_flags: Vec<TemplateParsedFlag>,
+    pub modules: Vec<String>,
+}
+
+/// Per-flag data rendered as one accessor function in the generated lib.
+#[derive(Serialize)]
+struct TemplateParsedFlag {
+    pub name: String,
+    pub device_config_namespace: String,
+    pub device_config_flag: String,
+
+    // TinyTemplate's conditionals are limited to single <bool> expressions; list all options here
+    // Invariant: exactly one of these fields will be true
+    pub is_read_only_enabled: bool,
+    pub is_read_only_disabled: bool,
+    pub is_read_write: bool,
+}
+
+impl TemplateParsedFlag {
+    /// Builds the template data for one cached flag, flattening the
+    /// (permission, state) pair into three mutually exclusive booleans.
+    #[allow(clippy::nonminimal_bool)]
+    fn new(package: &str, item: &Item) -> Self {
+        let template = TemplateParsedFlag {
+            name: item.name.clone(),
+            device_config_namespace: item.namespace.to_string(),
+            device_config_flag: codegen::create_device_config_ident(package, &item.name)
+                .expect("values checked at cache creation time"),
+            is_read_only_enabled: item.permission == Permission::ReadOnly
+                && item.state == FlagState::Enabled,
+            is_read_only_disabled: item.permission == Permission::ReadOnly
+                && item.state == FlagState::Disabled,
+            is_read_write: item.permission == Permission::ReadWrite,
+        };
+        // Debug-only sanity check of the "exactly one is true" invariant the
+        // template relies on; compiled out of release builds.
+        #[rustfmt::skip]
+        debug_assert!(
+            (template.is_read_only_enabled && !template.is_read_only_disabled && !template.is_read_write) ||
+            (!template.is_read_only_enabled && template.is_read_only_disabled && !template.is_read_write) ||
+            (!template.is_read_only_enabled && !template.is_read_only_disabled && template.is_read_write),
+            "TemplateParsedFlag invariant failed: {} {} {}",
+            template.is_read_only_enabled,
+            template.is_read_only_disabled,
+            template.is_read_write,
+        );
+        template
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    // Renders the shared test cache (crate::test::create_cache) and compares
+    // the generated lib.rs against a golden string, ignoring insignificant
+    // whitespace differences.
+    #[test]
+    fn test_generate_rust_code() {
+        let cache = crate::test::create_cache();
+        let generated = generate_rust_code(&cache).unwrap();
+        assert_eq!("src/lib.rs", format!("{}", generated.path.display()));
+        let expected = r#"
+pub mod com {
+pub mod android {
+pub mod aconfig {
+pub mod test {
+#[inline(always)]
+pub const fn r#disabled_ro() -> bool {
+    false
+}
+
+#[inline(always)]
+pub fn r#disabled_rw() -> bool {
+    flags_rust::GetServerConfigurableFlag("aconfig_test", "com.android.aconfig.test.disabled_rw", "false") == "true"
+}
+
+#[inline(always)]
+pub const fn r#enabled_ro() -> bool {
+    true
+}
+
+#[inline(always)]
+pub fn r#enabled_rw() -> bool {
+    flags_rust::GetServerConfigurableFlag("aconfig_test", "com.android.aconfig.test.enabled_rw", "false") == "true"
+}
+
+}
+}
+}
+}
+"#;
+        assert_eq!(
+            None,
+            crate::test::first_significant_code_diff(
+                expected,
+                &String::from_utf8(generated.contents).unwrap()
+            )
+        );
+    }
+}
diff --git a/tools/aconfig/src/commands.rs b/tools/aconfig/src/commands.rs
new file mode 100644
index 0000000..586ba04
--- /dev/null
+++ b/tools/aconfig/src/commands.rs
@@ -0,0 +1,321 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::{ensure, Context, Result};
+use clap::ValueEnum;
+use protobuf::Message;
+use serde::{Deserialize, Serialize};
+use std::fmt;
+use std::io::Read;
+use std::path::PathBuf;
+
+use crate::aconfig::{FlagDeclarations, FlagState, FlagValue, Permission};
+use crate::cache::{Cache, CacheBuilder, Item};
+use crate::codegen_cpp::generate_cpp_code;
+use crate::codegen_java::generate_java_code;
+use crate::codegen_rust::generate_rust_code;
+use crate::protos::ProtoParsedFlags;
+
+/// Where a flag declaration or value came from; recorded in the cache so
+/// trace output can point back at the origin (see the `<memory>` tracepoint
+/// asserted in the tests below).
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub enum Source {
+    #[allow(dead_code)] // only used in unit tests
+    Memory,
+    File(String),
+}
+
+impl fmt::Display for Source {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        match self {
+            Self::Memory => write!(f, "<memory>"),
+            Self::File(path) => write!(f, "{}", path),
+        }
+    }
+}
+
+/// An input stream paired with its provenance, for error messages and traces.
+pub struct Input {
+    pub source: Source,
+    pub reader: Box<dyn Read>,
+}
+
+/// A generated file: relative path plus raw contents.
+pub struct OutputFile {
+    pub path: PathBuf, // relative to some root directory only main knows about
+    pub contents: Vec<u8>,
+}
+
+/// Parses flag declarations and flag values (text protos) into a `Cache`.
+///
+/// Every declarations input must carry the expected `package`; values inputs
+/// may mention any package, and values that do not match a declared flag are
+/// silently ignored (see the `let _ =` below).
+pub fn create_cache(package: &str, declarations: Vec<Input>, values: Vec<Input>) -> Result<Cache> {
+    let mut builder = CacheBuilder::new(package.to_owned())?;
+
+    for mut input in declarations {
+        let mut contents = String::new();
+        input.reader.read_to_string(&mut contents)?;
+        let dec_list = FlagDeclarations::try_from_text_proto(&contents)
+            .with_context(|| format!("Failed to parse {}", input.source))?;
+        // A declarations file for the wrong package is a hard error.
+        ensure!(
+            package == dec_list.package,
+            "Failed to parse {}: expected package {}, got {}",
+            input.source,
+            package,
+            dec_list.package
+        );
+        for d in dec_list.flags.into_iter() {
+            builder.add_flag_declaration(input.source.clone(), d)?;
+        }
+    }
+
+    for mut input in values {
+        let mut contents = String::new();
+        input.reader.read_to_string(&mut contents)?;
+        let values_list = FlagValue::try_from_text_proto_list(&contents)
+            .with_context(|| format!("Failed to parse {}", input.source))?;
+        for v in values_list {
+            // TODO: warn about flag values that do not take effect?
+            let _ = builder.add_flag_value(input.source.clone(), v);
+        }
+    }
+
+    Ok(builder.build())
+}
+
+// Thin command-level wrappers over the per-language code generators; they
+// exist so main.rs only ever talks to the `commands` module.
+pub fn create_java_lib(cache: Cache) -> Result<OutputFile> {
+    generate_java_code(&cache)
+}
+
+pub fn create_cpp_lib(cache: Cache) -> Result<OutputFile> {
+    generate_cpp_code(&cache)
+}
+
+pub fn create_rust_lib(cache: Cache) -> Result<OutputFile> {
+    generate_rust_code(&cache)
+}
+
+/// Emits one `namespace:package.name=enabled|disabled` line per read-write
+/// flag, across all caches sorted by package (read-only flags are omitted
+/// because the server cannot change them anyway).
+pub fn create_device_config_defaults(caches: Vec<Cache>) -> Result<Vec<u8>> {
+    let mut output = Vec::new();
+    for item in sort_and_iter_items(caches).filter(|item| item.permission == Permission::ReadWrite)
+    {
+        let line = format!(
+            "{}:{}.{}={}\n",
+            item.namespace,
+            item.package,
+            item.name,
+            match item.state {
+                FlagState::Enabled => "enabled",
+                FlagState::Disabled => "disabled",
+            }
+        );
+        output.extend_from_slice(line.as_bytes());
+    }
+    Ok(output)
+}
+
+/// Emits one `persist.device_config.<package>.<name>=true|false` line per
+/// read-write flag, across all caches sorted by package. Note the sysprop key
+/// uses the package, not the namespace (unlike the defaults format above).
+pub fn create_device_config_sysprops(caches: Vec<Cache>) -> Result<Vec<u8>> {
+    let mut output = Vec::new();
+    for item in sort_and_iter_items(caches).filter(|item| item.permission == Permission::ReadWrite)
+    {
+        let line = format!(
+            "persist.device_config.{}.{}={}\n",
+            item.package,
+            item.name,
+            match item.state {
+                FlagState::Enabled => "true",
+                FlagState::Disabled => "false",
+            }
+        );
+        output.extend_from_slice(line.as_bytes());
+    }
+    Ok(output)
+}
+
+/// Output formats for the `dump` subcommand (parsed from the CLI via clap's
+/// ValueEnum, so the lowercase names `text`/`debug`/`protobuf` are accepted).
+#[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)]
+pub enum DumpFormat {
+    Text,
+    Debug,
+    Protobuf,
+}
+
+/// Serializes all caches (sorted by package) in the requested format.
+pub fn dump_cache(caches: Vec<Cache>, format: DumpFormat) -> Result<Vec<u8>> {
+    let mut output = Vec::new();
+    match format {
+        DumpFormat::Text => {
+            // One human-readable line per flag: "package/name: State Permission".
+            for item in sort_and_iter_items(caches) {
+                let line = format!(
+                    "{}/{}: {:?} {:?}\n",
+                    item.package, item.name, item.state, item.permission
+                );
+                output.extend_from_slice(line.as_bytes());
+            }
+        }
+        DumpFormat::Debug => {
+            for item in sort_and_iter_items(caches) {
+                let line = format!("{:#?}\n", item);
+                output.extend_from_slice(line.as_bytes());
+            }
+        }
+        DumpFormat::Protobuf => {
+            // Serialized messages are simply concatenated; protobuf's wire
+            // format merges concatenated messages on parse, so the result
+            // reads back as one ProtoParsedFlags with all flags appended
+            // (exercised by test_dump_multiple_caches below).
+            for cache in sort_and_iter_caches(caches) {
+                let parsed_flags: ProtoParsedFlags = cache.into();
+                parsed_flags.write_to_vec(&mut output)?;
+            }
+        }
+    }
+    Ok(output)
+}
+
+/// Flattens all caches into a single flag iterator, caches sorted by package
+/// so output is deterministic regardless of input order.
+fn sort_and_iter_items(caches: Vec<Cache>) -> impl Iterator<Item = Item> {
+    sort_and_iter_caches(caches).flat_map(|cache| cache.into_iter())
+}
+
+/// Sorts caches by package name and yields them in that order.
+fn sort_and_iter_caches(mut caches: Vec<Cache>) -> impl Iterator<Item = Cache> {
+    caches.sort_by_cached_key(|cache| cache.package().to_string());
+    caches.into_iter()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::aconfig::{FlagState, Permission};
+
+    // Cache with flags "a" (explicit value: DISABLED/READ_ONLY) and "b"
+    // (no value, so it keeps the cache defaults).
+    fn create_test_cache_com_example() -> Cache {
+        let s = r#"
+        package: "com.example"
+        flag {
+            name: "a"
+            namespace: "ns"
+            description: "Description of a"
+        }
+        flag {
+            name: "b"
+            namespace: "ns"
+            description: "Description of b"
+        }
+        "#;
+        let declarations = vec![Input { source: Source::Memory, reader: Box::new(s.as_bytes()) }];
+        let o = r#"
+        flag_value {
+            package: "com.example"
+            name: "a"
+            state: DISABLED
+            permission: READ_ONLY
+        }
+        "#;
+        let values = vec![Input { source: Source::Memory, reader: Box::new(o.as_bytes()) }];
+        create_cache("com.example", declarations, values).unwrap()
+    }
+
+    // Second cache in a different package, used to exercise multi-cache sorting.
+    fn create_test_cache_com_other() -> Cache {
+        let s = r#"
+        package: "com.other"
+        flag {
+            name: "c"
+            namespace: "ns"
+            description: "Description of c"
+        }
+        "#;
+        let declarations = vec![Input { source: Source::Memory, reader: Box::new(s.as_bytes()) }];
+        let o = r#"
+        flag_value {
+            package: "com.other"
+            name: "c"
+            state: DISABLED
+            permission: READ_ONLY
+        }
+        "#;
+        let values = vec![Input { source: Source::Memory, reader: Box::new(o.as_bytes()) }];
+        create_cache("com.other", declarations, values).unwrap()
+    }
+
+    #[test]
+    fn test_create_cache() {
+        let caches = create_test_cache_com_example(); // calls create_cache
+        let item = caches.iter().find(|&item| item.name == "a").unwrap();
+        assert_eq!(FlagState::Disabled, item.state);
+        assert_eq!(Permission::ReadOnly, item.permission);
+    }
+
+    #[test]
+    fn test_create_device_config_defaults() {
+        let caches = vec![crate::test::create_cache()];
+        let bytes = create_device_config_defaults(caches).unwrap();
+        let text = std::str::from_utf8(&bytes).unwrap();
+        assert_eq!("aconfig_test:com.android.aconfig.test.disabled_rw=disabled\naconfig_test:com.android.aconfig.test.enabled_rw=enabled\n", text);
+    }
+
+    #[test]
+    fn test_create_device_config_sysprops() {
+        let caches = vec![crate::test::create_cache()];
+        let bytes = create_device_config_sysprops(caches).unwrap();
+        let text = std::str::from_utf8(&bytes).unwrap();
+        assert_eq!("persist.device_config.com.android.aconfig.test.disabled_rw=false\npersist.device_config.com.android.aconfig.test.enabled_rw=true\n", text);
+    }
+
+    #[test]
+    fn test_dump_text_format() {
+        let caches = vec![create_test_cache_com_example()];
+        let bytes = dump_cache(caches, DumpFormat::Text).unwrap();
+        let text = std::str::from_utf8(&bytes).unwrap();
+        // Substring of the full "com.example/a: Disabled ReadOnly" line.
+        assert!(text.contains("a: Disabled"));
+    }
+
+    #[test]
+    fn test_dump_protobuf_format() {
+        use crate::protos::{ProtoFlagPermission, ProtoFlagState, ProtoTracepoint};
+        use protobuf::Message;
+
+        let caches = vec![create_test_cache_com_example()];
+        let bytes = dump_cache(caches, DumpFormat::Protobuf).unwrap();
+        let actual = ProtoParsedFlags::parse_from_bytes(&bytes).unwrap();
+
+        assert_eq!(
+            vec!["a".to_string(), "b".to_string()],
+            actual.parsed_flag.iter().map(|item| item.name.clone().unwrap()).collect::<Vec<_>>()
+        );
+
+        // "b" has no explicit value, so this pins the cache defaults:
+        // DISABLED / READ_WRITE, with a single <memory> tracepoint.
+        let item =
+            actual.parsed_flag.iter().find(|item| item.name == Some("b".to_string())).unwrap();
+        assert_eq!(item.package(), "com.example");
+        assert_eq!(item.name(), "b");
+        assert_eq!(item.description(), "Description of b");
+        assert_eq!(item.state(), ProtoFlagState::DISABLED);
+        assert_eq!(item.permission(), ProtoFlagPermission::READ_WRITE);
+        let mut tp = ProtoTracepoint::new();
+        tp.set_source("<memory>".to_string());
+        tp.set_state(ProtoFlagState::DISABLED);
+        tp.set_permission(ProtoFlagPermission::READ_WRITE);
+        assert_eq!(item.trace, vec![tp]);
+    }
+
+    #[test]
+    fn test_dump_multiple_caches() {
+        let caches = vec![create_test_cache_com_example(), create_test_cache_com_other()];
+        let bytes = dump_cache(caches, DumpFormat::Protobuf).unwrap();
+        let dump = ProtoParsedFlags::parse_from_bytes(&bytes).unwrap();
+        assert_eq!(
+            dump.parsed_flag
+                .iter()
+                .map(|parsed_flag| format!("{}/{}", parsed_flag.package(), parsed_flag.name()))
+                .collect::<Vec<_>>(),
+            vec![
+                "com.example/a".to_string(),
+                "com.example/b".to_string(),
+                "com.other/c".to_string()
+            ]
+        );
+
+        // Output must be independent of the order the caches were supplied in.
+        let caches = vec![create_test_cache_com_other(), create_test_cache_com_example()];
+        let bytes = dump_cache(caches, DumpFormat::Protobuf).unwrap();
+        let dump_reversed_input = ProtoParsedFlags::parse_from_bytes(&bytes).unwrap();
+        assert_eq!(dump, dump_reversed_input);
+    }
+}
diff --git a/tools/aconfig/src/main.rs b/tools/aconfig/src/main.rs
new file mode 100644
index 0000000..5a820d9
--- /dev/null
+++ b/tools/aconfig/src/main.rs
@@ -0,0 +1,206 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! `aconfig` is a build time tool to manage build time configurations, such as feature flags.
+
+use anyhow::{anyhow, ensure, Result};
+use clap::{builder::ArgAction, builder::EnumValueParser, Arg, ArgMatches, Command};
+use core::any::Any;
+use std::fs;
+use std::io;
+use std::io::Write;
+use std::path::{Path, PathBuf};
+
+mod aconfig;
+mod cache;
+mod codegen;
+mod codegen_cpp;
+mod codegen_java;
+mod codegen_rust;
+mod commands;
+mod protos;
+
+#[cfg(test)]
+mod test;
+
+use crate::cache::Cache;
+use commands::{DumpFormat, Input, OutputFile, Source};
+
+/// Builds the clap command tree; one subcommand per aconfig operation.
+/// Subcommands taking multiple `--cache` arguments use ArgAction::Append;
+/// `--out` defaulting to "-" means "write to stdout".
+fn cli() -> Command {
+    Command::new("aconfig")
+        .subcommand_required(true)
+        .subcommand(
+            Command::new("create-cache")
+                .arg(Arg::new("package").long("package").required(true))
+                .arg(Arg::new("declarations").long("declarations").action(ArgAction::Append))
+                .arg(Arg::new("values").long("values").action(ArgAction::Append))
+                .arg(Arg::new("cache").long("cache").required(true)),
+        )
+        .subcommand(
+            Command::new("create-java-lib")
+                .arg(Arg::new("cache").long("cache").required(true))
+                .arg(Arg::new("out").long("out").required(true)),
+        )
+        .subcommand(
+            Command::new("create-cpp-lib")
+                .arg(Arg::new("cache").long("cache").required(true))
+                .arg(Arg::new("out").long("out").required(true)),
+        )
+        .subcommand(
+            Command::new("create-rust-lib")
+                .arg(Arg::new("cache").long("cache").required(true))
+                .arg(Arg::new("out").long("out").required(true)),
+        )
+        .subcommand(
+            Command::new("create-device-config-defaults")
+                .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
+                .arg(Arg::new("out").long("out").default_value("-")),
+        )
+        .subcommand(
+            Command::new("create-device-config-sysprops")
+                .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
+                .arg(Arg::new("out").long("out").default_value("-")),
+        )
+        .subcommand(
+            Command::new("dump")
+                .arg(Arg::new("cache").long("cache").action(ArgAction::Append).required(true))
+                .arg(
+                    Arg::new("format")
+                        .long("format")
+                        .value_parser(EnumValueParser::<commands::DumpFormat>::new())
+                        .default_value("text"),
+                )
+                .arg(Arg::new("out").long("out").default_value("-")),
+        )
+}
+
+/// Fetches a clap argument that the CLI definition marks required; absence is
+/// therefore an internal error (clap would have rejected the invocation), not
+/// a user error.
+fn get_required_arg<'a, T>(matches: &'a ArgMatches, arg_name: &str) -> Result<&'a T>
+where
+    T: Any + Clone + Send + Sync + 'static,
+{
+    matches
+        .get_one::<T>(arg_name)
+        .ok_or(anyhow!("internal error: required argument '{}' not found", arg_name))
+}
+
+/// Opens every path given for a repeatable argument (e.g. `--declarations`),
+/// tagging each reader with its file path for later error messages.
+/// Returns an empty Vec when the argument was not supplied at all.
+fn open_zero_or_more_files(matches: &ArgMatches, arg_name: &str) -> Result<Vec<Input>> {
+    let mut opened_files = vec![];
+    for path in matches.get_many::<String>(arg_name).unwrap_or_default() {
+        let file = Box::new(fs::File::open(path)?);
+        opened_files.push(Input { source: Source::File(path.to_string()), reader: file });
+    }
+    Ok(opened_files)
+}
+
+/// Writes `output_file.contents` to `root`/`output_file.path`, creating any
+/// intermediate directories. `root` itself must already exist.
+// NOTE(review): "realtive" is a typo for "relative"; renaming touches the
+// three call sites in main(), so do it in a follow-up change.
+fn write_output_file_realtive_to_dir(root: &Path, output_file: &OutputFile) -> Result<()> {
+    ensure!(
+        root.is_dir(),
+        "output directory {} does not exist or is not a directory",
+        root.display()
+    );
+    let path = root.join(output_file.path.clone());
+    let parent = path
+        .parent()
+        .ok_or(anyhow!("unable to locate parent of output file {}", path.display()))?;
+    // Only the parent chain is created on demand; see ensure! above for root.
+    fs::create_dir_all(parent)?;
+    let mut file = fs::File::create(path)?;
+    file.write_all(&output_file.contents)?;
+    Ok(())
+}
+
+/// Writes `data` to `path`, treating the conventional "-" as stdout.
+fn write_output_to_file_or_stdout(path: &str, data: &[u8]) -> Result<()> {
+    if path == "-" {
+        io::stdout().write_all(data)?;
+    } else {
+        fs::File::create(path)?.write_all(data)?;
+    }
+    Ok(())
+}
+
+/// Dispatches the parsed subcommand. Single-cache subcommands read one
+/// serialized Cache file; multi-cache ones loop over repeated `--cache`
+/// arguments (the load loop is repeated three times below — a candidate for a
+/// small helper in a follow-up).
+fn main() -> Result<()> {
+    let matches = cli().get_matches();
+    match matches.subcommand() {
+        Some(("create-cache", sub_matches)) => {
+            let package = get_required_arg::<String>(sub_matches, "package")?;
+            let declarations = open_zero_or_more_files(sub_matches, "declarations")?;
+            let values = open_zero_or_more_files(sub_matches, "values")?;
+            let cache = commands::create_cache(package, declarations, values)?;
+            let path = get_required_arg::<String>(sub_matches, "cache")?;
+            let file = fs::File::create(path)?;
+            cache.write_to_writer(file)?;
+        }
+        Some(("create-java-lib", sub_matches)) => {
+            let path = get_required_arg::<String>(sub_matches, "cache")?;
+            let file = fs::File::open(path)?;
+            let cache = Cache::read_from_reader(file)?;
+            let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
+            let generated_file = commands::create_java_lib(cache)?;
+            write_output_file_realtive_to_dir(&dir, &generated_file)?;
+        }
+        Some(("create-cpp-lib", sub_matches)) => {
+            let path = get_required_arg::<String>(sub_matches, "cache")?;
+            let file = fs::File::open(path)?;
+            let cache = Cache::read_from_reader(file)?;
+            let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
+            let generated_file = commands::create_cpp_lib(cache)?;
+            write_output_file_realtive_to_dir(&dir, &generated_file)?;
+        }
+        Some(("create-rust-lib", sub_matches)) => {
+            let path = get_required_arg::<String>(sub_matches, "cache")?;
+            let file = fs::File::open(path)?;
+            let cache = Cache::read_from_reader(file)?;
+            let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
+            let generated_file = commands::create_rust_lib(cache)?;
+            write_output_file_realtive_to_dir(&dir, &generated_file)?;
+        }
+        Some(("create-device-config-defaults", sub_matches)) => {
+            let mut caches = Vec::new();
+            for path in sub_matches.get_many::<String>("cache").unwrap_or_default() {
+                let file = fs::File::open(path)?;
+                let cache = Cache::read_from_reader(file)?;
+                caches.push(cache);
+            }
+            let output = commands::create_device_config_defaults(caches)?;
+            let path = get_required_arg::<String>(sub_matches, "out")?;
+            write_output_to_file_or_stdout(path, &output)?;
+        }
+        Some(("create-device-config-sysprops", sub_matches)) => {
+            let mut caches = Vec::new();
+            for path in sub_matches.get_many::<String>("cache").unwrap_or_default() {
+                let file = fs::File::open(path)?;
+                let cache = Cache::read_from_reader(file)?;
+                caches.push(cache);
+            }
+            let output = commands::create_device_config_sysprops(caches)?;
+            let path = get_required_arg::<String>(sub_matches, "out")?;
+            write_output_to_file_or_stdout(path, &output)?;
+        }
+        Some(("dump", sub_matches)) => {
+            let mut caches = Vec::new();
+            for path in sub_matches.get_many::<String>("cache").unwrap_or_default() {
+                let file = fs::File::open(path)?;
+                let cache = Cache::read_from_reader(file)?;
+                caches.push(cache);
+            }
+            let format = get_required_arg::<DumpFormat>(sub_matches, "format")?;
+            let output = commands::dump_cache(caches, *format)?;
+            let path = get_required_arg::<String>(sub_matches, "out")?;
+            write_output_to_file_or_stdout(path, &output)?;
+        }
+        // clap's subcommand_required(true) guarantees one of the above matched.
+        _ => unreachable!(),
+    }
+    Ok(())
+}
diff --git a/tools/aconfig/src/protos.rs b/tools/aconfig/src/protos.rs
new file mode 100644
index 0000000..cb75692
--- /dev/null
+++ b/tools/aconfig/src/protos.rs
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// When building with the Android tool-chain
+//
+//   - an external crate `aconfig_protos` will be generated
+//   - the feature "cargo" will be disabled
+//
+// When building with cargo
+//
+//   - a local sub-module will be generated in OUT_DIR and included in this file
+//   - the feature "cargo" will be enabled
+//
+// This module hides these differences from the rest of aconfig.
+
+// ---- When building with the Android tool-chain ----
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_declaration as ProtoFlagDeclaration;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_declarations as ProtoFlagDeclarations;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_value as ProtoFlagValue;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_values as ProtoFlagValues;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_permission as ProtoFlagPermission;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_state as ProtoFlagState;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Parsed_flags as ProtoParsedFlags;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Parsed_flag as ProtoParsedFlag;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Tracepoint as ProtoTracepoint;
+
+// ---- When building with cargo ----
+#[cfg(feature = "cargo")]
+include!(concat!(env!("OUT_DIR"), "/aconfig_proto/mod.rs"));
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_declaration as ProtoFlagDeclaration;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_declarations as ProtoFlagDeclarations;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_value as ProtoFlagValue;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_values as ProtoFlagValues;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_permission as ProtoFlagPermission;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_state as ProtoFlagState;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Parsed_flags as ProtoParsedFlags;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Parsed_flag as ProtoParsedFlag;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Tracepoint as ProtoTracepoint;
+
+// ---- Common for both the Android tool-chain and cargo ----
+use anyhow::Result;
+
+pub fn try_from_text_proto<T>(s: &str) -> Result<T>
+where
+    T: protobuf::MessageFull,
+{
+    // warning: parse_from_str does not check if required fields are set
+    protobuf::text_format::parse_from_str(s).map_err(|e| e.into())
+}
diff --git a/tools/aconfig/src/test.rs b/tools/aconfig/src/test.rs
new file mode 100644
index 0000000..76ef005
--- /dev/null
+++ b/tools/aconfig/src/test.rs
@@ -0,0 +1,94 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#[cfg(test)]
+pub mod test_utils {
+    use crate::cache::Cache;
+    use crate::commands::{Input, Source};
+    use itertools;
+
+    pub fn create_cache() -> Cache {
+        crate::commands::create_cache(
+            "com.android.aconfig.test",
+            vec![Input {
+                source: Source::File("tests/test.aconfig".to_string()),
+                reader: Box::new(include_bytes!("../tests/test.aconfig").as_slice()),
+            }],
+            vec![
+                Input {
+                    source: Source::File("tests/first.values".to_string()),
+                    reader: Box::new(include_bytes!("../tests/first.values").as_slice()),
+                },
+                Input {
+                    source: Source::File("tests/second.values".to_string()),
+                    reader: Box::new(include_bytes!("../tests/second.values").as_slice()),
+                },
+            ],
+        )
+        .unwrap()
+    }
+
+    pub fn first_significant_code_diff(a: &str, b: &str) -> Option<String> {
+        let a = a.lines().map(|line| line.trim_start()).filter(|line| !line.is_empty());
+        let b = b.lines().map(|line| line.trim_start()).filter(|line| !line.is_empty());
+        match itertools::diff_with(a, b, |left, right| left == right) {
+            Some(itertools::Diff::FirstMismatch(_, mut left, mut right)) => {
+                Some(format!("'{}' vs '{}'", left.next().unwrap(), right.next().unwrap()))
+            }
+            Some(itertools::Diff::Shorter(_, mut left)) => {
+                Some(format!("LHS trailing data: '{}'", left.next().unwrap()))
+            }
+            Some(itertools::Diff::Longer(_, mut right)) => {
+                Some(format!("RHS trailing data: '{}'", right.next().unwrap()))
+            }
+            None => None,
+        }
+    }
+
+    #[test]
+    fn test_first_significant_code_diff() {
+        assert!(first_significant_code_diff("", "").is_none());
+        assert!(first_significant_code_diff("   a", "\n\na\n").is_none());
+        let a = r#"
+        public class A {
+            private static final String FOO = "FOO";
+            public static void main(String[] args) {
+                System.out.println("FOO=" + FOO);
+            }
+        }
+        "#;
+        let b = r#"
+        public class A {
+            private static final String FOO = "BAR";
+            public static void main(String[] args) {
+                System.out.println("foo=" + FOO);
+            }
+        }
+        "#;
+        assert_eq!(Some(r#"'private static final String FOO = "FOO";' vs 'private static final String FOO = "BAR";'"#.to_string()), first_significant_code_diff(a, b));
+        assert_eq!(
+            Some("LHS trailing data: 'b'".to_string()),
+            first_significant_code_diff("a\nb", "a")
+        );
+        assert_eq!(
+            Some("RHS trailing data: 'b'".to_string()),
+            first_significant_code_diff("a", "a\nb")
+        );
+    }
+}
+
+#[cfg(test)]
+pub use test_utils::*;
diff --git a/tools/aconfig/templates/cpp.template b/tools/aconfig/templates/cpp.template
new file mode 100644
index 0000000..aa36d94
--- /dev/null
+++ b/tools/aconfig/templates/cpp.template
@@ -0,0 +1,21 @@
+#ifndef {header}_HEADER_H
+#define {header}_HEADER_H
+{{ if readwrite }}
+#include <server_configurable_flags/get_flags.h>
+using namespace server_configurable_flags;
+{{ endif }}
+namespace {cpp_namespace} \{
+    {{ for item in class_elements}}
+    static const bool {item.flag_name}() \{
+        {{ if item.readwrite- }}
+        return GetServerConfigurableFlag(
+            "{item.device_config_namespace}",
+            "{item.device_config_flag}",
+            "{item.default_value}") == "true";
+        {{ -else- }}
+            return {item.default_value};
+        {{ -endif }}
+    }
+    {{ endfor }}
+}
+#endif
diff --git a/tools/aconfig/templates/java.template b/tools/aconfig/templates/java.template
new file mode 100644
index 0000000..a3d3319
--- /dev/null
+++ b/tools/aconfig/templates/java.template
@@ -0,0 +1,19 @@
+package {package};
+{{ if readwrite }}
+import android.provider.DeviceConfig;
+{{ endif }}
+public final class Flags \{
+    {{ for item in class_elements}}
+    public static boolean {item.method_name}() \{
+        {{ if item.readwrite- }}
+        return DeviceConfig.getBoolean(
+            "{item.device_config_namespace}",
+            "{item.device_config_flag}",
+            {item.default_value}
+        );
+        {{ -else- }}
+        return {item.default_value};
+        {{ -endif }}
+    }
+    {{ endfor }}
+}
diff --git a/tools/aconfig/templates/rust.template b/tools/aconfig/templates/rust.template
new file mode 100644
index 0000000..d914943
--- /dev/null
+++ b/tools/aconfig/templates/rust.template
@@ -0,0 +1,29 @@
+{{- for mod in modules -}}
+pub mod {mod} \{
+{{ endfor -}}
+{{- for parsed_flag in parsed_flags -}}
+{{- if parsed_flag.is_read_only_disabled -}}
+#[inline(always)]
+pub const fn r#{parsed_flag.name}() -> bool \{
+    false
+}
+
+{{ endif -}}
+{{- if parsed_flag.is_read_only_enabled -}}
+#[inline(always)]
+pub const fn r#{parsed_flag.name}() -> bool \{
+    true
+}
+
+{{ endif -}}
+{{- if parsed_flag.is_read_write -}}
+#[inline(always)]
+pub fn r#{parsed_flag.name}() -> bool \{
+    flags_rust::GetServerConfigurableFlag("{parsed_flag.device_config_namespace}", "{parsed_flag.device_config_flag}", "false") == "true"
+}
+
+{{ endif -}}
+{{- endfor -}}
+{{- for mod in modules -}}
+}
+{{ endfor -}}
diff --git a/tools/aconfig/tests/AconfigTest.java b/tools/aconfig/tests/AconfigTest.java
new file mode 100644
index 0000000..5db490b
--- /dev/null
+++ b/tools/aconfig/tests/AconfigTest.java
@@ -0,0 +1,37 @@
+import static com.android.aconfig.test.Flags.disabled_ro;
+import static com.android.aconfig.test.Flags.disabled_rw;
+import static com.android.aconfig.test.Flags.enabled_ro;
+import static com.android.aconfig.test.Flags.enabled_rw;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.JUnit4;
+
+@RunWith(JUnit4.class)
+public final class AconfigTest {
+    @Test
+    public void testDisabledReadOnlyFlag() {
+        assertFalse(disabled_ro());
+    }
+
+    @Test
+    public void testEnabledReadOnlyFlag() {
+        // TODO: change to assertTrue(enabled_ro()) when the build supports reading tests/*.values
+        // (currently all flags are assigned the default READ_ONLY + DISABLED)
+        assertFalse(enabled_ro());
+    }
+
+    @Test
+    public void testDisabledReadWriteFlag() {
+        assertFalse(disabled_rw());
+    }
+
+    @Test
+    public void testEnabledReadWriteFlag() {
+        // TODO: change to assertTrue(enabled_rw()) when the build supports reading tests/*.values
+        // (currently all flags are assigned the default READ_ONLY + DISABLED)
+        assertFalse(enabled_rw());
+    }
+}
diff --git a/tools/aconfig/tests/AndroidManifest.xml b/tools/aconfig/tests/AndroidManifest.xml
new file mode 100644
index 0000000..04002e6
--- /dev/null
+++ b/tools/aconfig/tests/AndroidManifest.xml
@@ -0,0 +1,29 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2023 The Android Open Source Project
+
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+
+          http://www.apache.org/licenses/LICENSE-2.0
+
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+
+<manifest xmlns:android="http://schemas.android.com/apk/res/android"
+    package="aconfig.test.java">
+
+    <uses-permission android:name="android.permission.READ_DEVICE_CONFIG" />
+
+    <application>
+        <uses-library android:name="android.test.runner"/>
+    </application>
+
+    <instrumentation android:name="androidx.test.runner.AndroidJUnitRunner"
+        android:targetPackage="aconfig.test.java"
+        android:label="aconfig integration tests (java)" />
+</manifest>
diff --git a/tools/aconfig/tests/first.values b/tools/aconfig/tests/first.values
new file mode 100644
index 0000000..e524404
--- /dev/null
+++ b/tools/aconfig/tests/first.values
@@ -0,0 +1,18 @@
+flag_value {
+    package: "com.android.aconfig.test"
+    name: "disabled_ro"
+    state: DISABLED
+    permission: READ_ONLY
+}
+flag_value {
+    package: "com.android.aconfig.test"
+    name: "enabled_ro"
+    state: DISABLED
+    permission: READ_WRITE
+}
+flag_value {
+    package: "com.android.aconfig.test"
+    name: "enabled_rw"
+    state: ENABLED
+    permission: READ_WRITE
+}
diff --git a/tools/aconfig/tests/second.values b/tools/aconfig/tests/second.values
new file mode 100644
index 0000000..aa09cf6
--- /dev/null
+++ b/tools/aconfig/tests/second.values
@@ -0,0 +1,6 @@
+flag_value {
+    package: "com.android.aconfig.test"
+    name: "enabled_ro"
+    state: ENABLED
+    permission: READ_ONLY
+}
diff --git a/tools/aconfig/tests/test.aconfig b/tools/aconfig/tests/test.aconfig
new file mode 100644
index 0000000..d09396a
--- /dev/null
+++ b/tools/aconfig/tests/test.aconfig
@@ -0,0 +1,37 @@
+package: "com.android.aconfig.test"
+
+# This flag's final value is calculated from:
+# - test.aconfig: DISABLED + READ_WRITE (default)
+# - first.values: DISABLED + READ_ONLY
+flag {
+    name: "disabled_ro"
+    namespace: "aconfig_test"
+    description: "This flag is DISABLED + READ_ONLY"
+}
+
+# This flag's final value is calculated from:
+# - test.aconfig: DISABLED + READ_WRITE (default)
+flag {
+    name: "disabled_rw"
+    namespace: "aconfig_test"
+    description: "This flag is DISABLED + READ_WRITE"
+}
+
+# This flag's final value is calculated from:
+# - test.aconfig: DISABLED + READ_WRITE (default)
+# - first.values: DISABLED + READ_WRITE
+# - second.values: ENABLED + READ_ONLY
+flag {
+    name: "enabled_ro"
+    namespace: "aconfig_test"
+    description: "This flag is ENABLED + READ_ONLY"
+}
+
+# This flag's final value is calculated from:
+# - test.aconfig: DISABLED + READ_WRITE (default)
+# - first.values: ENABLED + READ_WRITE
+flag {
+    name: "enabled_rw"
+    namespace: "aconfig_test"
+    description: "This flag is ENABLED + READ_WRITE"
+}
diff --git a/tools/compliance/Android.bp b/tools/compliance/Android.bp
index 8e13f2f..ef5c760 100644
--- a/tools/compliance/Android.bp
+++ b/tools/compliance/Android.bp
@@ -138,6 +138,11 @@
         "compliance-module",
         "blueprint-deptools",
         "soong-response",
+        "spdx-tools-spdxv2_2",
+        "spdx-tools-builder2v2",
+        "spdx-tools-spdxcommon",
+        "spdx-tools-spdx-json",
+        "spdx-tools-spdxlib",
     ],
     testSrcs: ["cmd/sbom/sbom_test.go"],
 }
diff --git a/tools/compliance/cmd/sbom/sbom.go b/tools/compliance/cmd/sbom/sbom.go
index 0f8a876..a53741f 100644
--- a/tools/compliance/cmd/sbom/sbom.go
+++ b/tools/compliance/cmd/sbom/sbom.go
@@ -16,6 +16,8 @@
 
 import (
 	"bytes"
+	"crypto/sha1"
+	"encoding/hex"
 	"flag"
 	"fmt"
 	"io"
@@ -31,6 +33,12 @@
 	"android/soong/tools/compliance/projectmetadata"
 
 	"github.com/google/blueprint/deptools"
+
+	"github.com/spdx/tools-golang/builder/builder2v2"
+	spdx_json "github.com/spdx/tools-golang/json"
+	"github.com/spdx/tools-golang/spdx/common"
+	spdx "github.com/spdx/tools-golang/spdx/v2_2"
+	"github.com/spdx/tools-golang/spdxlib"
 )
 
 var (
@@ -38,6 +46,8 @@
 	failNoLicenses    = fmt.Errorf("No licenses found")
 )
 
+const NOASSERTION = "NOASSERTION"
+
 type context struct {
 	stdout       io.Writer
 	stderr       io.Writer
@@ -45,6 +55,7 @@
 	product      string
 	stripPrefix  []string
 	creationTime creationTimeGetter
+	buildid      string
 }
 
 func (ctx context) strip(installPath string) string {
@@ -114,6 +125,7 @@
 	depsFile := flags.String("d", "", "Where to write the deps file")
 	product := flags.String("product", "", "The name of the product for which the notice is generated.")
 	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+	buildid := flags.String("build_id", "", "Uniquely identifies the build. (default timestamp)")
 
 	flags.Parse(expandedArgs)
 
@@ -152,9 +164,10 @@
 		ofile = obuf
 	}
 
-	ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, actualTime}
+	ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, actualTime, *buildid}
 
-	deps, err := sbomGenerator(ctx, flags.Args()...)
+	spdxDoc, deps, err := sbomGenerator(ctx, flags.Args()...)
+
 	if err != nil {
 		if err == failNoneRequested {
 			flags.Usage()
@@ -163,6 +176,12 @@
 		os.Exit(1)
 	}
 
+	// writing the spdx Doc created
+	if err := spdx_json.Save2_2(spdxDoc, ofile); err != nil {
+		fmt.Fprintf(os.Stderr, "failed to write document to %v: %v", *outputFile, err)
+		os.Exit(1)
+	}
+
 	if *outputFile != "-" {
 		err := os.WriteFile(*outputFile, obuf.Bytes(), 0666)
 		if err != nil {
@@ -181,11 +200,12 @@
 	os.Exit(0)
 }
 
-type creationTimeGetter func() time.Time
+type creationTimeGetter func() string
 
 // actualTime returns current time in UTC
-func actualTime() time.Time {
-	return time.Now().UTC()
+func actualTime() string {
+	t := time.Now().UTC()
+	return t.Format("2006-01-02T15:04:05Z")
 }
 
 // replaceSlashes replaces "/" by "-" for the library path to be used for packages & files SPDXID
@@ -193,6 +213,23 @@
 	return strings.ReplaceAll(x, "/", "-")
 }
 
+// stripDocName removes the outdir prefix and meta_lic suffix from a target Name
+func stripDocName(name string) string {
+	// remove outdir prefix
+	if strings.HasPrefix(name, "out/") {
+		name = name[4:]
+	}
+
+	// remove suffix
+	if strings.HasSuffix(name, ".meta_lic") {
+		name = name[:len(name)-9]
+	} else if strings.HasSuffix(name, "/meta_lic") {
+		name = name[:len(name)-9] + "/"
+	}
+
+	return name
+}
+
 // getPackageName returns a package name of a target Node
 func getPackageName(_ *context, tn *compliance.TargetNode) string {
 	return replaceSlashes(tn.Name())
@@ -210,25 +247,24 @@
 		return replaceSlashes(tn.ModuleName())
 	}
 
-	// TO DO: Replace tn.Name() with pm.Name() + parts of the target name
-	return replaceSlashes(tn.Name())
+	return stripDocName(replaceSlashes(tn.Name()))
 }
 
 // getDownloadUrl returns the download URL if available (GIT, SVN, etc..),
 // or NOASSERTION if not available, none determined or ambiguous
 func getDownloadUrl(_ *context, pm *projectmetadata.ProjectMetadata) string {
 	if pm == nil {
-		return "NOASSERTION"
+		return NOASSERTION
 	}
 
 	urlsByTypeName := pm.UrlsByTypeName()
 	if urlsByTypeName == nil {
-		return "NOASSERTION"
+		return NOASSERTION
 	}
 
 	url := urlsByTypeName.DownloadUrl()
 	if url == "" {
-		return "NOASSERTION"
+		return NOASSERTION
 	}
 	return url
 }
@@ -238,7 +274,7 @@
 	tn *compliance.TargetNode) (*projectmetadata.ProjectMetadata, error) {
 	pms, err := pmix.MetadataForProjects(tn.Projects()...)
 	if err != nil {
-		return nil, fmt.Errorf("Unable to read projects for %q: %w\n", tn, err)
+		return nil, fmt.Errorf("Unable to read projects for %q: %w\n", tn.Name(), err)
 	}
 	if len(pms) == 0 {
 		return nil, nil
@@ -274,7 +310,7 @@
 // inputFiles returns the complete list of files read
 func inputFiles(lg *compliance.LicenseGraph, pmix *projectmetadata.Index, licenseTexts []string) []string {
 	projectMeta := pmix.AllMetadataFiles()
-	targets :=  lg.TargetNames()
+	targets := lg.TargetNames()
 	files := make([]string, 0, len(licenseTexts)+len(targets)+len(projectMeta))
 	files = append(files, licenseTexts...)
 	files = append(files, targets...)
@@ -282,6 +318,26 @@
 	return files
 }
 
+// generateSPDXNamespace generates a unique SPDX Document Namespace using a SHA1 checksum
+func generateSPDXNamespace(buildid string, created string, files ...string) string {
+
+	seed := strings.Join(files, "")
+
+	if buildid == "" {
+		seed += created
+	} else {
+		seed += buildid
+	}
+
+	// Compute a SHA1 checksum of the seed.
+	hash := sha1.Sum([]byte(seed))
+	uuid := hex.EncodeToString(hash[:])
+
+	namespace := fmt.Sprintf("SPDXRef-DOCUMENT-%s", uuid)
+
+	return namespace
+}
+
 // sbomGenerator implements the spdx bom utility
 
 // SBOM is part of the new government regulation issued to improve national cyber security
@@ -289,10 +345,10 @@
 
 // sbomGenerator uses the SPDX standard, see the SPDX specification (https://spdx.github.io/spdx-spec/)
 // sbomGenerator is also following the internal google SBOM styleguide (http://goto.google.com/spdx-style-guide)
-func sbomGenerator(ctx *context, files ...string) ([]string, error) {
+func sbomGenerator(ctx *context, files ...string) (*spdx.Document, []string, error) {
 	// Must be at least one root file.
 	if len(files) < 1 {
-		return nil, failNoneRequested
+		return nil, nil, failNoneRequested
 	}
 
 	pmix := projectmetadata.NewIndex(ctx.rootFS)
@@ -300,9 +356,24 @@
 	lg, err := compliance.ReadLicenseGraph(ctx.rootFS, ctx.stderr, files)
 
 	if err != nil {
-		return nil, fmt.Errorf("Unable to read license text file(s) for %q: %v\n", files, err)
+		return nil, nil, fmt.Errorf("Unable to read license text file(s) for %q: %v\n", files, err)
 	}
 
+	// creating the packages section
+	pkgs := []*spdx.Package{}
+
+	// creating the relationship section
+	relationships := []*spdx.Relationship{}
+
+	// creating the license section
+	otherLicenses := []*spdx.OtherLicense{}
+
+	// spdx document name
+	var docName string
+
+	// main package name
+	var mainPkgName string
+
 	// implementing the licenses references for the packages
 	licenses := make(map[string]string)
 	concludedLicenses := func(licenseTexts []string) string {
@@ -325,7 +396,6 @@
 	}
 
 	isMainPackage := true
-	var mainPackage string
 	visitedNodes := make(map[*compliance.TargetNode]struct{})
 
 	// performing a Breadth-first top down walk of licensegraph and building package information
@@ -341,45 +411,51 @@
 			}
 
 			if isMainPackage {
-				mainPackage = getDocumentName(ctx, tn, pm)
-				fmt.Fprintf(ctx.stdout, "SPDXVersion: SPDX-2.2\n")
-				fmt.Fprintf(ctx.stdout, "DataLicense: CC0-1.0\n")
-				fmt.Fprintf(ctx.stdout, "DocumentName: %s\n", mainPackage)
-				fmt.Fprintf(ctx.stdout, "SPDXID: SPDXRef-DOCUMENT\n")
-				fmt.Fprintf(ctx.stdout, "DocumentNamespace: Android\n")
-				fmt.Fprintf(ctx.stdout, "Creator: Organization: Google LLC\n")
-				fmt.Fprintf(ctx.stdout, "Created: %s\n", ctx.creationTime().Format("2006-01-02T15:04:05Z"))
+				docName = getDocumentName(ctx, tn, pm)
+				mainPkgName = replaceSlashes(getPackageName(ctx, tn))
 				isMainPackage = false
 			}
 
-			relationships := make([]string, 0, 1)
-			defer func() {
-				if r := recover(); r != nil {
-					panic(r)
-				}
-				for _, relationship := range relationships {
-					fmt.Fprintln(ctx.stdout, relationship)
-				}
-			}()
 			if len(path) == 0 {
-				relationships = append(relationships,
-					fmt.Sprintf("Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-Package-%s",
-						getPackageName(ctx, tn)))
+				// Add the describe relationship for the main package
+				rln := &spdx.Relationship{
+					RefA:         common.MakeDocElementID("" /* this document */, "DOCUMENT"),
+					RefB:         common.MakeDocElementID("", mainPkgName),
+					Relationship: "DESCRIBES",
+				}
+				relationships = append(relationships, rln)
+
 			} else {
 				// Check parent and identify annotation
 				parent := path[len(path)-1]
 				targetEdge := parent.Edge()
 				if targetEdge.IsRuntimeDependency() {
 					// Adding the dynamic link annotation RUNTIME_DEPENDENCY_OF relationship
-					relationships = append(relationships, fmt.Sprintf("Relationship: SPDXRef-Package-%s RUNTIME_DEPENDENCY_OF SPDXRef-Package-%s", getPackageName(ctx, tn), getPackageName(ctx, targetEdge.Target())))
+					rln := &spdx.Relationship{
+						RefA:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, tn))),
+						RefB:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, targetEdge.Target()))),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					}
+					relationships = append(relationships, rln)
 
 				} else if targetEdge.IsDerivation() {
 					// Adding the  derivation annotation as a CONTAINS relationship
-					relationships = append(relationships, fmt.Sprintf("Relationship: SPDXRef-Package-%s CONTAINS SPDXRef-Package-%s", getPackageName(ctx, targetEdge.Target()), getPackageName(ctx, tn)))
+					rln := &spdx.Relationship{
+						RefA:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, targetEdge.Target()))),
+						RefB:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, tn))),
+						Relationship: "CONTAINS",
+					}
+					relationships = append(relationships, rln)
 
 				} else if targetEdge.IsBuildTool() {
 					// Adding the toolchain annotation as a BUILD_TOOL_OF relationship
-					relationships = append(relationships, fmt.Sprintf("Relationship: SPDXRef-Package-%s BUILD_TOOL_OF SPDXRef-Package-%s", getPackageName(ctx, tn), getPackageName(ctx, targetEdge.Target())))
+					rln := &spdx.Relationship{
+						RefA:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, tn))),
+						RefB:         common.MakeDocElementID("", replaceSlashes(getPackageName(ctx, targetEdge.Target()))),
+						Relationship: "BUILD_TOOL_OF",
+					}
+					relationships = append(relationships, rln)
+
 				} else {
 					panic(fmt.Errorf("Unknown dependency type: %v", targetEdge.Annotations()))
 				}
@@ -390,18 +466,27 @@
 			}
 			visitedNodes[tn] = struct{}{}
 			pkgName := getPackageName(ctx, tn)
-			fmt.Fprintf(ctx.stdout, "##### Package: %s\n", strings.Replace(pkgName, "-", "/", -2))
-			fmt.Fprintf(ctx.stdout, "PackageName: %s\n", pkgName)
-			if pm != nil && pm.Version() != "" {
-				fmt.Fprintf(ctx.stdout, "PackageVersion: %s\n", pm.Version())
+
+			// Making an spdx package and adding it to pkgs
+			pkg := &spdx.Package{
+				PackageName:             replaceSlashes(pkgName),
+				PackageDownloadLocation: getDownloadUrl(ctx, pm),
+				PackageSPDXIdentifier:   common.ElementID(replaceSlashes(pkgName)),
+				PackageLicenseConcluded: concludedLicenses(tn.LicenseTexts()),
 			}
-			fmt.Fprintf(ctx.stdout, "SPDXID: SPDXRef-Package-%s\n", pkgName)
-			fmt.Fprintf(ctx.stdout, "PackageDownloadLocation: %s\n", getDownloadUrl(ctx, pm))
-			fmt.Fprintf(ctx.stdout, "PackageLicenseConcluded: %s\n", concludedLicenses(tn.LicenseTexts()))
+
+			if pm != nil && pm.Version() != "" {
+				pkg.PackageVersion = pm.Version()
+			} else {
+				pkg.PackageVersion = NOASSERTION
+			}
+
+			pkgs = append(pkgs, pkg)
+
 			return true
 		})
 
-	fmt.Fprintf(ctx.stdout, "##### Non-standard license:\n")
+	// Adding Non-standard licenses
 
 	licenseTexts := make([]string, 0, len(licenses))
 
@@ -412,23 +497,51 @@
 	sort.Strings(licenseTexts)
 
 	for _, licenseText := range licenseTexts {
-		fmt.Fprintf(ctx.stdout, "LicenseID: %s\n", licenses[licenseText])
 		// open the file
 		f, err := ctx.rootFS.Open(filepath.Clean(licenseText))
 		if err != nil {
-			return nil, fmt.Errorf("error opening license text file %q: %w", licenseText, err)
+			return nil, nil, fmt.Errorf("error opening license text file %q: %w", licenseText, err)
 		}
 
 		// read the file
 		text, err := io.ReadAll(f)
 		if err != nil {
-			return nil, fmt.Errorf("error reading license text file %q: %w", licenseText, err)
+			return nil, nil, fmt.Errorf("error reading license text file %q: %w", licenseText, err)
 		}
-		// adding the extracted license text
-		fmt.Fprintf(ctx.stdout, "ExtractedText: <text>%v</text>\n", string(text))
+		// Making an spdx License and adding it to otherLicenses
+		otherLicenses = append(otherLicenses, &spdx.OtherLicense{
+			LicenseName:       strings.Replace(licenses[licenseText], "LicenseRef-", "", -1),
+			LicenseIdentifier: string(licenses[licenseText]),
+			ExtractedText:     string(text),
+		})
 	}
 
 	deps := inputFiles(lg, pmix, licenseTexts)
 	sort.Strings(deps)
-	return deps, nil
+
+	// Making the SPDX doc
+	ci, err := builder2v2.BuildCreationInfoSection2_2("Organization", "Google LLC", nil)
+	if err != nil {
+		return nil, nil, fmt.Errorf("Unable to build creation info section for SPDX doc: %v\n", err)
+	}
+
+	ci.Created = ctx.creationTime()
+
+	doc := &spdx.Document{
+		SPDXVersion:       "SPDX-2.2",
+		DataLicense:       "CC0-1.0",
+		SPDXIdentifier:    "DOCUMENT",
+		DocumentName:      docName,
+		DocumentNamespace: generateSPDXNamespace(ctx.buildid, ci.Created, files...),
+		CreationInfo:      ci,
+		Packages:          pkgs,
+		Relationships:     relationships,
+		OtherLicenses:     otherLicenses,
+	}
+
+	if err := spdxlib.ValidateDocument2_2(doc); err != nil {
+		return nil, nil, fmt.Errorf("Unable to validate the SPDX doc: %v\n", err)
+	}
+
+	return doc, deps, nil
 }
diff --git a/tools/compliance/cmd/sbom/sbom_test.go b/tools/compliance/cmd/sbom/sbom_test.go
index 6df74e2..13ba66d 100644
--- a/tools/compliance/cmd/sbom/sbom_test.go
+++ b/tools/compliance/cmd/sbom/sbom_test.go
@@ -15,37 +15,20 @@
 package main
 
 import (
-	"bufio"
 	"bytes"
+	"encoding/json"
 	"fmt"
 	"os"
 	"reflect"
-	"regexp"
 	"strings"
 	"testing"
 	"time"
 
 	"android/soong/tools/compliance"
-)
 
-var (
-	spdxVersionTag              = regexp.MustCompile(`^\s*SPDXVersion: SPDX-2.2\s*$`)
-	spdxDataLicenseTag          = regexp.MustCompile(`^\s*DataLicense: CC0-1.0\s*$`)
-	spdxDocumentNameTag         = regexp.MustCompile(`^\s*DocumentName:\s*Android*\s*$`)
-	spdxIDTag                   = regexp.MustCompile(`^\s*SPDXID:\s*SPDXRef-DOCUMENT\s*$`)
-	spdxDocumentNameSpaceTag    = regexp.MustCompile(`^\s*DocumentNamespace:\s*Android\s*$`)
-	spdxCreatorOrganizationTag  = regexp.MustCompile(`^\s*Creator:\s*Organization:\s*Google LLC\s*$`)
-	spdxCreatedTimeTag          = regexp.MustCompile(`^\s*Created: 1970-01-01T00:00:00Z\s*$`)
-	spdxPackageTag              = regexp.MustCompile(`^\s*#####\s*Package:\s*(.*)\s*$`)
-	spdxPackageNameTag          = regexp.MustCompile(`^\s*PackageName:\s*(.*)\s*$`)
-	spdxPkgIDTag                = regexp.MustCompile(`^\s*SPDXID:\s*SPDXRef-Package-(.*)\s*$`)
-	spdxPkgDownloadLocationTag  = regexp.MustCompile(`^\s*PackageDownloadLocation:\s*NOASSERTION\s*$`)
-	spdxPkgLicenseDeclaredTag   = regexp.MustCompile(`^\s*PackageLicenseConcluded:\s*LicenseRef-(.*)\s*$`)
-	spdxRelationshipTag         = regexp.MustCompile(`^\s*Relationship:\s*SPDXRef-(.*)\s*(DESCRIBES|CONTAINS|BUILD_TOOL_OF|RUNTIME_DEPENDENCY_OF)\s*SPDXRef-Package-(.*)\s*$`)
-	spdxLicenseTag              = regexp.MustCompile(`^\s*##### Non-standard license:\s*$`)
-	spdxLicenseIDTag            = regexp.MustCompile(`^\s*LicenseID: LicenseRef-(.*)\s*$`)
-	spdxExtractedTextTag        = regexp.MustCompile(`^\s*ExtractedText:\s*<text>(.*)\s*$`)
-	spdxExtractedClosingTextTag = regexp.MustCompile(`^\s*</text>\s*$`)
+	"github.com/spdx/tools-golang/builder/builder2v2"
+	"github.com/spdx/tools-golang/spdx/common"
+	spdx "github.com/spdx/tools-golang/spdx/v2_2"
 )
 
 func TestMain(m *testing.M) {
@@ -65,69 +48,125 @@
 		outDir       string
 		roots        []string
 		stripPrefix  string
-		expectedOut  []matcher
+		expectedOut  *spdx.Document
 		expectedDeps []string
 	}{
 		{
 			condition: "firstparty",
 			name:      "apex",
 			roots:     []string{"highest.apex.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/firstparty/highest.apex.meta_lic"},
-				packageName{"testdata/firstparty/highest.apex.meta_lic"},
-				spdxPkgID{"testdata/firstparty/highest.apex.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata-firstparty-highest.apex.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/firstparty/bin/bin1.meta_lic"},
-				packageName{"testdata/firstparty/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/firstparty/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata/firstparty/bin/bin1.meta_lic", "CONTAINS"},
-				packageTag{"testdata/firstparty/bin/bin2.meta_lic"},
-				packageName{"testdata/firstparty/bin/bin2.meta_lic"},
-				spdxPkgID{"testdata/firstparty/bin/bin2.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata-firstparty-bin-bin2.meta_lic", "CONTAINS"},
-				packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
-				packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/firstparty/lib/libb.so.meta_lic"},
-				packageName{"testdata/firstparty/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/highest.apex.meta_lic ", "testdata/firstparty/lib/libb.so.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/firstparty/lib/libc.a.meta_lic"},
-				packageName{"testdata/firstparty/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata-firstparty-bin-bin1.meta_lic ", "testdata/firstparty/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/firstparty/lib/libb.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				packageTag{"testdata/firstparty/lib/libd.so.meta_lic"},
-				packageName{"testdata/firstparty/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/lib/libd.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-firstparty-highest.apex",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/highest.apex.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-firstparty-highest.apex.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-highest.apex.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-highest.apex.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -144,42 +183,72 @@
 			condition: "firstparty",
 			name:      "application",
 			roots:     []string{"application.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/firstparty/application.meta_lic"},
-				packageName{"testdata/firstparty/application.meta_lic"},
-				spdxPkgID{"testdata/firstparty/application.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/firstparty/application.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/firstparty/bin/bin3.meta_lic"},
-				packageName{"testdata/firstparty/bin/bin3.meta_lic"},
-				spdxPkgID{"testdata/firstparty/bin/bin3.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/bin/bin3.meta_lic ", "testdata-firstparty-application.meta_lic", "BUILD_TOOL_OF"},
-				packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
-				packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/application.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/firstparty/lib/libb.so.meta_lic"},
-				packageName{"testdata/firstparty/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/lib/libb.so.meta_lic ", "testdata-firstparty-application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-firstparty-application",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/application.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-firstparty-application.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-application.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-bin-bin3.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin3.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-application.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin3.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-application.meta_lic"),
+						Relationship: "BUILD_TOOL_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-application.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-application.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -193,62 +262,118 @@
 			condition: "firstparty",
 			name:      "container",
 			roots:     []string{"container.zip.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/firstparty/container.zip.meta_lic"},
-				packageName{"testdata/firstparty/container.zip.meta_lic"},
-				spdxPkgID{"testdata/firstparty/container.zip.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/firstparty/container.zip.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/firstparty/bin/bin1.meta_lic"},
-				packageName{"testdata/firstparty/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/firstparty/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/bin/bin1.meta_lic", "CONTAINS"},
-				packageTag{"testdata/firstparty/bin/bin2.meta_lic"},
-				packageName{"testdata/firstparty/bin/bin2.meta_lic"},
-				spdxPkgID{"testdata/firstparty/bin/bin2.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "CONTAINS"},
-				packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
-				packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/firstparty/lib/libb.so.meta_lic"},
-				packageName{"testdata/firstparty/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/container.zip.meta_lic ", "testdata/firstparty/lib/libb.so.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/firstparty/lib/libc.a.meta_lic"},
-				packageName{"testdata/firstparty/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/firstparty/lib/libb.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				packageTag{"testdata/firstparty/lib/libd.so.meta_lic"},
-				packageName{"testdata/firstparty/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/lib/libd.so.meta_lic ", "testdata/firstparty/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-firstparty-container.zip",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/container.zip.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-firstparty-container.zip.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-container.zip.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-container.zip.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -265,36 +390,60 @@
 			condition: "firstparty",
 			name:      "binary",
 			roots:     []string{"bin/bin1.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/firstparty/bin/bin1.meta_lic"},
-				packageName{"testdata/firstparty/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/firstparty/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/firstparty/bin/bin1.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/firstparty/lib/liba.so.meta_lic"},
-				packageName{"testdata/firstparty/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/firstparty/lib/libc.a.meta_lic"},
-				packageName{"testdata/firstparty/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/firstparty/bin/bin1.meta_lic ", "testdata/firstparty/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-firstparty-bin-bin1",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/bin/bin1.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-firstparty-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-firstparty-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-firstparty-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -307,24 +456,36 @@
 			condition: "firstparty",
 			name:      "library",
 			roots:     []string{"lib/libd.so.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/firstparty/lib/libd.so.meta_lic"},
-				packageName{"testdata/firstparty/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/firstparty/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/firstparty/lib/libd.so.meta_lic", "DESCRIBES"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-firstparty-lib-libd.so",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/lib/libd.so.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-firstparty-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-firstparty-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-firstparty-lib-libd.so.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -335,65 +496,123 @@
 			condition: "notice",
 			name:      "apex",
 			roots:     []string{"highest.apex.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/notice/highest.apex.meta_lic"},
-				packageName{"testdata/notice/highest.apex.meta_lic"},
-				spdxPkgID{"testdata/notice/highest.apex.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/notice/highest.apex.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/notice/bin/bin1.meta_lic"},
-				packageName{"testdata/notice/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/notice/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/bin/bin1.meta_lic", "CONTAINS"},
-				packageTag{"testdata/notice/bin/bin2.meta_lic"},
-				packageName{"testdata/notice/bin/bin2.meta_lic"},
-				spdxPkgID{"testdata/notice/bin/bin2.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "CONTAINS"},
-				packageTag{"testdata/notice/lib/liba.so.meta_lic"},
-				packageName{"testdata/notice/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/notice/lib/libb.so.meta_lic"},
-				packageName{"testdata/notice/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/highest.apex.meta_lic ", "testdata/notice/lib/libb.so.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/notice/lib/libc.a.meta_lic"},
-				packageName{"testdata/notice/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/notice/lib/libb.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				packageTag{"testdata/notice/lib/libd.so.meta_lic"},
-				packageName{"testdata/notice/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/lib/libd.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-notice-highest.apex",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/highest.apex.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-notice-highest.apex.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-highest.apex.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-highest.apex.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -411,65 +630,123 @@
 			condition: "notice",
 			name:      "container",
 			roots:     []string{"container.zip.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/notice/container.zip.meta_lic"},
-				packageName{"testdata/notice/container.zip.meta_lic"},
-				spdxPkgID{"testdata/notice/container.zip.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/notice/container.zip.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/notice/bin/bin1.meta_lic"},
-				packageName{"testdata/notice/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/notice/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/bin/bin1.meta_lic", "CONTAINS"},
-				packageTag{"testdata/notice/bin/bin2.meta_lic"},
-				packageName{"testdata/notice/bin/bin2.meta_lic"},
-				spdxPkgID{"testdata/notice/bin/bin2.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "CONTAINS"},
-				packageTag{"testdata/notice/lib/liba.so.meta_lic"},
-				packageName{"testdata/notice/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/notice/lib/libb.so.meta_lic"},
-				packageName{"testdata/notice/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/container.zip.meta_lic ", "testdata/notice/lib/libb.so.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/notice/lib/libc.a.meta_lic"},
-				packageName{"testdata/notice/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/notice/lib/libb.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				packageTag{"testdata/notice/lib/libd.so.meta_lic"},
-				packageName{"testdata/notice/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/lib/libd.so.meta_lic ", "testdata/notice/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-notice-container.zip",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/container.zip.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-notice-container.zip.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-container.zip.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-container.zip.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -487,45 +764,77 @@
 			condition: "notice",
 			name:      "application",
 			roots:     []string{"application.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/notice/application.meta_lic"},
-				packageName{"testdata/notice/application.meta_lic"},
-				spdxPkgID{"testdata/notice/application.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata-notice-application.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/notice/bin/bin3.meta_lic"},
-				packageName{"testdata/notice/bin/bin3.meta_lic"},
-				spdxPkgID{"testdata/notice/bin/bin3.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata-notice-bin-bin3.meta_lic ", "testdata/notice/application.meta_lic", "BUILD_TOOL_OF"},
-				packageTag{"testdata/notice/lib/liba.so.meta_lic"},
-				packageName{"testdata/notice/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/application.meta_lic ", "testdata-notice-lib-liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/notice/lib/libb.so.meta_lic"},
-				packageName{"testdata/notice/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata-notice-lib-libb.so.meta_lic ", "testdata/notice/application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-notice-application",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/application.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-notice-application.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-application.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-bin-bin3.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin3.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-application.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin3.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-application.meta_lic"),
+						Relationship: "BUILD_TOOL_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-application.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-application.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -540,39 +849,65 @@
 			condition: "notice",
 			name:      "binary",
 			roots:     []string{"bin/bin1.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/notice/bin/bin1.meta_lic"},
-				packageName{"testdata/notice/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/notice/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/notice/bin/bin1.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/notice/lib/liba.so.meta_lic"},
-				packageName{"testdata/notice/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/notice/lib/libc.a.meta_lic"},
-				packageName{"testdata/notice/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/notice/bin/bin1.meta_lic ", "testdata/notice/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-notice-bin-bin1",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/bin/bin1.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-notice-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-notice-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-notice-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -586,24 +921,36 @@
 			condition: "notice",
 			name:      "library",
 			roots:     []string{"lib/libd.so.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/notice/lib/libd.so.meta_lic"},
-				packageName{"testdata/notice/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/notice/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/notice/lib/libd.so.meta_lic", "DESCRIBES"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-notice-lib-libd.so",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/lib/libd.so.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-notice-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-notice-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-notice-lib-libd.so.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/notice/NOTICE_LICENSE",
@@ -614,68 +961,128 @@
 			condition: "reciprocal",
 			name:      "apex",
 			roots:     []string{"highest.apex.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/reciprocal/highest.apex.meta_lic"},
-				packageName{"testdata/reciprocal/highest.apex.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/highest.apex.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/reciprocal/highest.apex.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/reciprocal/bin/bin1.meta_lic"},
-				packageName{"testdata/reciprocal/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata-reciprocal-bin-bin1.meta_lic", "CONTAINS"},
-				packageTag{"testdata/reciprocal/bin/bin2.meta_lic"},
-				packageName{"testdata/reciprocal/bin/bin2.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/bin/bin2.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata-reciprocal-bin-bin2.meta_lic", "CONTAINS"},
-				packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/reciprocal/lib/libb.so.meta_lic"},
-				packageName{"testdata/reciprocal/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/highest.apex.meta_lic ", "testdata/reciprocal/lib/libb.so.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/reciprocal/lib/libc.a.meta_lic"},
-				packageName{"testdata/reciprocal/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/reciprocal/lib/libb.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				packageTag{"testdata/reciprocal/lib/libd.so.meta_lic"},
-				packageName{"testdata/reciprocal/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/lib/libd.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxExtractedText{"$$$Reciprocal License$$$"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-reciprocal-highest.apex",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/highest.apex.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-reciprocal-highest.apex.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-highest.apex.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-highest.apex.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -692,130 +1099,84 @@
 		},
 		{
 			condition: "reciprocal",
-			name:      "container",
-			roots:     []string{"container.zip.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/reciprocal/container.zip.meta_lic"},
-				packageName{"testdata/reciprocal/container.zip.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/container.zip.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/reciprocal/container.zip.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/reciprocal/bin/bin1.meta_lic"},
-				packageName{"testdata/reciprocal/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata-reciprocal-bin-bin1.meta_lic", "CONTAINS"},
-				packageTag{"testdata/reciprocal/bin/bin2.meta_lic"},
-				packageName{"testdata/reciprocal/bin/bin2.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/bin/bin2.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata-reciprocal-bin-bin2.meta_lic", "CONTAINS"},
-				packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/reciprocal/lib/libb.so.meta_lic"},
-				packageName{"testdata/reciprocal/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/container.zip.meta_lic ", "testdata/reciprocal/lib/libb.so.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/reciprocal/lib/libc.a.meta_lic"},
-				packageName{"testdata/reciprocal/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/reciprocal/lib/libb.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				packageTag{"testdata/reciprocal/lib/libd.so.meta_lic"},
-				packageName{"testdata/reciprocal/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/lib/libd.so.meta_lic ", "testdata/reciprocal/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxExtractedText{"$$$Reciprocal License$$$"},
-				spdxExtractedClosingText{},
-			},
-			expectedDeps: []string{
-				"testdata/firstparty/FIRST_PARTY_LICENSE",
-				"testdata/notice/NOTICE_LICENSE",
-				"testdata/reciprocal/RECIPROCAL_LICENSE",
-				"testdata/reciprocal/bin/bin1.meta_lic",
-				"testdata/reciprocal/bin/bin2.meta_lic",
-				"testdata/reciprocal/container.zip.meta_lic",
-				"testdata/reciprocal/lib/liba.so.meta_lic",
-				"testdata/reciprocal/lib/libb.so.meta_lic",
-				"testdata/reciprocal/lib/libc.a.meta_lic",
-				"testdata/reciprocal/lib/libd.so.meta_lic",
-			},
-		},
-		{
-			condition: "reciprocal",
 			name:      "application",
 			roots:     []string{"application.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/reciprocal/application.meta_lic"},
-				packageName{"testdata/reciprocal/application.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/application.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/reciprocal/application.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/reciprocal/bin/bin3.meta_lic"},
-				packageName{"testdata/reciprocal/bin/bin3.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/bin/bin3.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata-reciprocal-bin-bin3.meta_lic ", "testdata/reciprocal/application.meta_lic", "BUILD_TOOL_OF"},
-				packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/application.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/reciprocal/lib/libb.so.meta_lic"},
-				packageName{"testdata/reciprocal/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/lib/libb.so.meta_lic ", "testdata/reciprocal/application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxExtractedText{"$$$Reciprocal License$$$"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-reciprocal-application",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/application.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-reciprocal-application.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-application.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-bin-bin3.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-bin-bin3.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-application.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin3.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-application.meta_lic"),
+						Relationship: "BUILD_TOOL_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-application.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-application.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -831,39 +1192,65 @@
 			condition: "reciprocal",
 			name:      "binary",
 			roots:     []string{"bin/bin1.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/reciprocal/bin/bin1.meta_lic"},
-				packageName{"testdata/reciprocal/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/reciprocal/bin/bin1.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				packageName{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/reciprocal/lib/libc.a.meta_lic"},
-				packageName{"testdata/reciprocal/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxRelationship{"Package-testdata/reciprocal/bin/bin1.meta_lic ", "testdata/reciprocal/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxExtractedText{"$$$Reciprocal License$$$"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-reciprocal-bin-bin1",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/bin/bin1.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-reciprocal-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-reciprocal-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-reciprocal-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -877,24 +1264,36 @@
 			condition: "reciprocal",
 			name:      "library",
 			roots:     []string{"lib/libd.so.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/reciprocal/lib/libd.so.meta_lic"},
-				packageName{"testdata/reciprocal/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/reciprocal/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/reciprocal/lib/libd.so.meta_lic", "DESCRIBES"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-reciprocal-lib-libd.so",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/lib/libd.so.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-reciprocal-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-reciprocal-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-reciprocal-lib-libd.so.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/notice/NOTICE_LICENSE",
@@ -902,75 +1301,136 @@
 			},
 		},
 		{
-			condition:   "restricted",
-			name:        "apex",
-			roots:       []string{"highest.apex.meta_lic"},
-			stripPrefix: "out/target/product/fictional/system/apex/",
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/restricted/highest.apex.meta_lic"},
-				packageName{"testdata/restricted/highest.apex.meta_lic"},
-				spdxPkgID{"testdata/restricted/highest.apex.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/restricted/highest.apex.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/restricted/bin/bin1.meta_lic"},
-				packageName{"testdata/restricted/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/restricted/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/bin/bin1.meta_lic", "CONTAINS"},
-				packageTag{"testdata/restricted/bin/bin2.meta_lic"},
-				packageName{"testdata/restricted/bin/bin2.meta_lic"},
-				spdxPkgID{"testdata/restricted/bin/bin2.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "CONTAINS"},
-				packageTag{"testdata/restricted/lib/liba.so.meta_lic"},
-				packageName{"testdata/restricted/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/restricted/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/restricted/lib/libb.so.meta_lic"},
-				packageName{"testdata/restricted/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/restricted/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/highest.apex.meta_lic ", "testdata/restricted/lib/libb.so.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/restricted/lib/libc.a.meta_lic"},
-				packageName{"testdata/restricted/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/restricted/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/restricted/lib/libb.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				packageTag{"testdata/restricted/lib/libd.so.meta_lic"},
-				packageName{"testdata/restricted/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/restricted/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/lib/libd.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxExtractedText{"$$$Reciprocal License$$$"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxExtractedText{"###Restricted License###"},
-				spdxExtractedClosingText{},
+			condition: "restricted",
+			name:      "apex",
+			roots:     []string{"highest.apex.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-restricted-highest.apex",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/highest.apex.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-restricted-highest.apex.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-highest.apex.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-highest.apex.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -987,75 +1447,136 @@
 			},
 		},
 		{
-			condition:   "restricted",
-			name:        "container",
-			roots:       []string{"container.zip.meta_lic"},
-			stripPrefix: "out/target/product/fictional/system/apex/",
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/restricted/container.zip.meta_lic"},
-				packageName{"testdata/restricted/container.zip.meta_lic"},
-				spdxPkgID{"testdata/restricted/container.zip.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/restricted/container.zip.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/restricted/bin/bin1.meta_lic"},
-				packageName{"testdata/restricted/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/restricted/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/bin/bin1.meta_lic", "CONTAINS"},
-				packageTag{"testdata/restricted/bin/bin2.meta_lic"},
-				packageName{"testdata/restricted/bin/bin2.meta_lic"},
-				spdxPkgID{"testdata/restricted/bin/bin2.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "CONTAINS"},
-				packageTag{"testdata/restricted/lib/liba.so.meta_lic"},
-				packageName{"testdata/restricted/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/restricted/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/restricted/lib/libb.so.meta_lic"},
-				packageName{"testdata/restricted/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/restricted/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/container.zip.meta_lic ", "testdata/restricted/lib/libb.so.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/restricted/lib/libc.a.meta_lic"},
-				packageName{"testdata/restricted/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/restricted/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/restricted/lib/libb.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				packageTag{"testdata/restricted/lib/libd.so.meta_lic"},
-				packageName{"testdata/restricted/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/restricted/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/lib/libd.so.meta_lic ", "testdata/restricted/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxExtractedText{"$$$Reciprocal License$$$"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxExtractedText{"###Restricted License###"},
-				spdxExtractedClosingText{},
+			condition: "restricted",
+			name:      "container",
+			roots:     []string{"container.zip.meta_lic"},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-restricted-container.zip",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/container.zip.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-restricted-container.zip.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-container.zip.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-container.zip.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -1075,42 +1596,70 @@
 			condition: "restricted",
 			name:      "binary",
 			roots:     []string{"bin/bin1.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/restricted/bin/bin1.meta_lic"},
-				packageName{"testdata/restricted/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/restricted/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/restricted/bin/bin1.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/restricted/lib/liba.so.meta_lic"},
-				packageName{"testdata/restricted/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/restricted/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/restricted/lib/libc.a.meta_lic"},
-				packageName{"testdata/restricted/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/restricted/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxRelationship{"Package-testdata/restricted/bin/bin1.meta_lic ", "testdata/restricted/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-reciprocal-RECIPROCAL_LICENSE"},
-				spdxExtractedText{"$$$Reciprocal License$$$"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxExtractedText{"###Restricted License###"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-restricted-bin-bin1",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/bin/bin1.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-restricted-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-restricted-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-restricted-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-reciprocal-RECIPROCAL_LICENSE",
+						ExtractedText:     "$$$Reciprocal License$$$\n",
+						LicenseName:       "testdata-reciprocal-RECIPROCAL_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -1125,24 +1674,36 @@
 			condition: "restricted",
 			name:      "library",
 			roots:     []string{"lib/libd.so.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/restricted/lib/libd.so.meta_lic"},
-				packageName{"testdata/restricted/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/restricted/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/restricted/lib/libd.so.meta_lic", "DESCRIBES"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-restricted-lib-libd.so",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/lib/libd.so.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-restricted-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-restricted-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-restricted-lib-libd.so.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/notice/NOTICE_LICENSE",
@@ -1153,71 +1714,133 @@
 			condition: "proprietary",
 			name:      "apex",
 			roots:     []string{"highest.apex.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/proprietary/highest.apex.meta_lic"},
-				packageName{"testdata/proprietary/highest.apex.meta_lic"},
-				spdxPkgID{"testdata/proprietary/highest.apex.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/proprietary/highest.apex.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/proprietary/bin/bin1.meta_lic"},
-				packageName{"testdata/proprietary/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/proprietary/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/bin/bin1.meta_lic", "CONTAINS"},
-				packageTag{"testdata/proprietary/bin/bin2.meta_lic"},
-				packageName{"testdata/proprietary/bin/bin2.meta_lic"},
-				spdxPkgID{"testdata/proprietary/bin/bin2.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "CONTAINS"},
-				packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
-				packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/proprietary/lib/libb.so.meta_lic"},
-				packageName{"testdata/proprietary/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/highest.apex.meta_lic ", "testdata/proprietary/lib/libb.so.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/proprietary/lib/libc.a.meta_lic"},
-				packageName{"testdata/proprietary/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata-proprietary-lib-libb.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				packageTag{"testdata/proprietary/lib/libd.so.meta_lic"},
-				packageName{"testdata/proprietary/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata-proprietary-lib-libd.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxExtractedText{"@@@Proprietary License@@@"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxExtractedText{"###Restricted License###"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-proprietary-highest.apex",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/highest.apex.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-proprietary-highest.apex.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-highest.apex.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-highest.apex.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-highest.apex.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+						ExtractedText:     "@@@Proprietary License@@@\n",
+						LicenseName:       "testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -1237,71 +1860,133 @@
 			condition: "proprietary",
 			name:      "container",
 			roots:     []string{"container.zip.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/proprietary/container.zip.meta_lic"},
-				packageName{"testdata/proprietary/container.zip.meta_lic"},
-				spdxPkgID{"testdata/proprietary/container.zip.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/proprietary/container.zip.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/proprietary/bin/bin1.meta_lic"},
-				packageName{"testdata/proprietary/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/proprietary/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/bin/bin1.meta_lic", "CONTAINS"},
-				packageTag{"testdata/proprietary/bin/bin2.meta_lic"},
-				packageName{"testdata/proprietary/bin/bin2.meta_lic"},
-				spdxPkgID{"testdata/proprietary/bin/bin2.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "CONTAINS"},
-				packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
-				packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/proprietary/lib/libb.so.meta_lic"},
-				packageName{"testdata/proprietary/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/container.zip.meta_lic ", "testdata/proprietary/lib/libb.so.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/proprietary/lib/libc.a.meta_lic"},
-				packageName{"testdata/proprietary/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxRelationship{"Package-testdata-proprietary-lib-libb.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				packageTag{"testdata/proprietary/lib/libd.so.meta_lic"},
-				packageName{"testdata/proprietary/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"Package-testdata-proprietary-lib-libd.so.meta_lic ", "testdata/proprietary/bin/bin2.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxExtractedText{"@@@Proprietary License@@@"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxExtractedText{"###Restricted License###"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-proprietary-container.zip",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/container.zip.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-proprietary-container.zip.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-container.zip.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-bin-bin2.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin2.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-container.zip.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-container.zip.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libb.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-lib-libd.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin2.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+						ExtractedText:     "@@@Proprietary License@@@\n",
+						LicenseName:       "testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -1321,48 +2006,82 @@
 			condition: "proprietary",
 			name:      "application",
 			roots:     []string{"application.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/proprietary/application.meta_lic"},
-				packageName{"testdata/proprietary/application.meta_lic"},
-				spdxPkgID{"testdata/proprietary/application.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/proprietary/application.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/proprietary/bin/bin3.meta_lic"},
-				packageName{"testdata/proprietary/bin/bin3.meta_lic"},
-				spdxPkgID{"testdata/proprietary/bin/bin3.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/bin/bin3.meta_lic ", "testdata/proprietary/application.meta_lic", "BUILD_TOOL_OF"},
-				packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
-				packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/application.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/proprietary/lib/libb.so.meta_lic"},
-				packageName{"testdata/proprietary/lib/libb.so.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/libb.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/lib/libb.so.meta_lic ", "testdata/proprietary/application.meta_lic", "RUNTIME_DEPENDENCY_OF"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxExtractedText{"@@@Proprietary License@@@"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-restricted-RESTRICTED_LICENSE"},
-				spdxExtractedText{"###Restricted License###"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-proprietary-application",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/application.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-proprietary-application.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-application.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-bin-bin3.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin3.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libb.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libb.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-application.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin3.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-application.meta_lic"),
+						Relationship: "BUILD_TOOL_OF",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-application.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-lib-libb.so.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-application.meta_lic"),
+						Relationship: "RUNTIME_DEPENDENCY_OF",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+						ExtractedText:     "@@@Proprietary License@@@\n",
+						LicenseName:       "testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-restricted-RESTRICTED_LICENSE",
+						ExtractedText:     "###Restricted License###\n",
+						LicenseName:       "testdata-restricted-RESTRICTED_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -1378,39 +2097,65 @@
 			condition: "proprietary",
 			name:      "binary",
 			roots:     []string{"bin/bin1.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/proprietary/bin/bin1.meta_lic"},
-				packageName{"testdata/proprietary/bin/bin1.meta_lic"},
-				spdxPkgID{"testdata/proprietary/bin/bin1.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/proprietary/bin/bin1.meta_lic", "DESCRIBES"},
-				packageTag{"testdata/proprietary/lib/liba.so.meta_lic"},
-				packageName{"testdata/proprietary/lib/liba.so.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/liba.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/liba.so.meta_lic", "CONTAINS"},
-				packageTag{"testdata/proprietary/lib/libc.a.meta_lic"},
-				packageName{"testdata/proprietary/lib/libc.a.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/libc.a.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxRelationship{"Package-testdata/proprietary/bin/bin1.meta_lic ", "testdata/proprietary/lib/libc.a.meta_lic", "CONTAINS"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-firstparty-FIRST_PARTY_LICENSE"},
-				spdxExtractedText{"&&&First Party License&&&"},
-				spdxExtractedClosingText{},
-				spdxLicenseID{"testdata-proprietary-PROPRIETARY_LICENSE"},
-				spdxExtractedText{"@@@Proprietary License@@@"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-proprietary-bin-bin1",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/bin/bin1.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-proprietary-bin-bin1.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-bin-bin1.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-liba.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-liba.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+					{
+						PackageName:             "testdata-proprietary-lib-libc.a.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libc.a.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-liba.so.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+					{
+						RefA:         common.MakeDocElementID("", "testdata-proprietary-bin-bin1.meta_lic"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libc.a.meta_lic"),
+						Relationship: "CONTAINS",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-firstparty-FIRST_PARTY_LICENSE",
+						ExtractedText:     "&&&First Party License&&&\n",
+						LicenseName:       "testdata-firstparty-FIRST_PARTY_LICENSE",
+					},
+					{
+						LicenseIdentifier: "LicenseRef-testdata-proprietary-PROPRIETARY_LICENSE",
+						ExtractedText:     "@@@Proprietary License@@@\n",
+						LicenseName:       "testdata-proprietary-PROPRIETARY_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/firstparty/FIRST_PARTY_LICENSE",
@@ -1424,24 +2169,36 @@
 			condition: "proprietary",
 			name:      "library",
 			roots:     []string{"lib/libd.so.meta_lic"},
-			expectedOut: []matcher{
-				spdxVersion{},
-				spdxDataLicense{},
-				spdxDocumentName{"Android"},
-				spdxID{},
-				spdxDocumentNameSpace{},
-				spdxCreatorOrganization{},
-				spdxCreatedTime{},
-				packageTag{"testdata/proprietary/lib/libd.so.meta_lic"},
-				packageName{"testdata/proprietary/lib/libd.so.meta_lic"},
-				spdxPkgID{"testdata/proprietary/lib/libd.so.meta_lic"},
-				spdxPkgDownloadLocation{"NOASSERTION"},
-				spdxPkgLicenseDeclared{"testdata-notice-NOTICE_LICENSE"},
-				spdxRelationship{"DOCUMENT ", "testdata/proprietary/lib/libd.so.meta_lic", "DESCRIBES"},
-				spdxLicense{},
-				spdxLicenseID{"testdata-notice-NOTICE_LICENSE"},
-				spdxExtractedText{"%%%Notice License%%%"},
-				spdxExtractedClosingText{},
+			expectedOut: &spdx.Document{
+				SPDXVersion:       "SPDX-2.2",
+				DataLicense:       "CC0-1.0",
+				SPDXIdentifier:    "DOCUMENT",
+				DocumentName:      "testdata-proprietary-lib-libd.so",
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/lib/libd.so.meta_lic"),
+				CreationInfo:      getCreationInfo(t),
+				Packages: []*spdx.Package{
+					{
+						PackageName:             "testdata-proprietary-lib-libd.so.meta_lic",
+						PackageVersion:          "NOASSERTION",
+						PackageDownloadLocation: "NOASSERTION",
+						PackageSPDXIdentifier:   common.ElementID("testdata-proprietary-lib-libd.so.meta_lic"),
+						PackageLicenseConcluded: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+					},
+				},
+				Relationships: []*spdx.Relationship{
+					{
+						RefA:         common.MakeDocElementID("", "DOCUMENT"),
+						RefB:         common.MakeDocElementID("", "testdata-proprietary-lib-libd.so.meta_lic"),
+						Relationship: "DESCRIBES",
+					},
+				},
+				OtherLicenses: []*spdx.OtherLicense{
+					{
+						LicenseIdentifier: "LicenseRef-testdata-notice-NOTICE_LICENSE",
+						ExtractedText:     "%%%Notice License%%%\n",
+						LicenseName:       "testdata-notice-NOTICE_LICENSE",
+					},
+				},
 			},
 			expectedDeps: []string{
 				"testdata/notice/NOTICE_LICENSE",
@@ -1459,9 +2216,9 @@
 				rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
 			}
 
-			ctx := context{stdout, stderr, compliance.GetFS(tt.outDir), "Android", []string{tt.stripPrefix}, fakeTime}
+			ctx := context{stdout, stderr, compliance.GetFS(tt.outDir), "", []string{tt.stripPrefix}, fakeTime, ""}
 
-			deps, err := sbomGenerator(&ctx, rootFiles...)
+			spdxDoc, deps, err := sbomGenerator(&ctx, rootFiles...)
 			if err != nil {
 				t.Fatalf("sbom: error = %v, stderr = %v", err, stderr)
 				return
@@ -1470,28 +2227,30 @@
 				t.Errorf("sbom: gotStderr = %v, want none", stderr)
 			}
 
-			t.Logf("got stdout: %s", stdout.String())
-
-			t.Logf("want stdout: %s", matcherList(tt.expectedOut).String())
-
-			out := bufio.NewScanner(stdout)
-			lineno := 0
-			for out.Scan() {
-				line := out.Text()
-				if strings.TrimLeft(line, " ") == "" {
-					continue
-				}
-				if len(tt.expectedOut) <= lineno {
-					t.Errorf("sbom: unexpected output at line %d: got %q, want nothing (wanted %d lines)", lineno+1, line, len(tt.expectedOut))
-				} else if !tt.expectedOut[lineno].isMatch(line) {
-					t.Errorf("sbom: unexpected output at line %d: got %q, want %q", lineno+1, line, tt.expectedOut[lineno])
-				}
-				lineno++
-			}
-			for ; lineno < len(tt.expectedOut); lineno++ {
-				t.Errorf("bom: missing output line %d: ended early, want %q", lineno+1, tt.expectedOut[lineno])
+			if err := validate(spdxDoc); err != nil {
+				t.Fatalf("sbom: document fails to validate: %v", err)
 			}
 
+			gotData, err := json.Marshal(spdxDoc)
+			if err != nil {
+				t.Fatalf("sbom: failed to marshal spdx doc: %v", err)
+				return
+			}
+
+			t.Logf("Got SPDX Doc: %s", string(gotData))
+
+			expectedData, err := json.Marshal(tt.expectedOut)
+			if err != nil {
+				t.Fatalf("sbom: failed to marshal spdx doc: %v", err)
+				return
+			}
+
+			t.Logf("Want SPDX Doc: %s", string(expectedData))
+
+			// compare the spdx Docs
+			compareSpdxDocs(t, spdxDoc, tt.expectedOut)
+
+			// compare deps
 			t.Logf("got deps: %q", deps)
 
 			t.Logf("want deps: %q", tt.expectedDeps)
@@ -1504,242 +2263,296 @@
 	}
 }
 
-type matcher interface {
-	isMatch(line string) bool
-	String() string
+func TestGenerateSPDXNamespace(t *testing.T) {
+
+	buildID1 := "example-1"
+	buildID2 := "example-2"
+	files1 := "file1"
+	timestamp1 := "2022-05-01"
+	timestamp2 := "2022-05-02"
+	files2 := "file2"
+
+	// Test case 1: different timestamps, same files
+	nsh1 := generateSPDXNamespace("", timestamp1, files1)
+	nsh2 := generateSPDXNamespace("", timestamp2, files1)
+
+	if nsh1 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp1, files1)
+	}
+
+	if nsh2 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp2, files1)
+	}
+
+	if nsh1 == nsh2 {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", "", timestamp1, files1, "", timestamp2, files1)
+	}
+
+	// Test case 2: different build ids, same timestamps and files
+	nsh1 = generateSPDXNamespace(buildID1, timestamp1, files1)
+	nsh2 = generateSPDXNamespace(buildID2, timestamp1, files1)
+
+	if nsh1 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files1)
+	}
+
+	if nsh2 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID2, timestamp1, files1)
+	}
+
+	if nsh1 == nsh2 {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", buildID1, timestamp1, files1, buildID2, timestamp1, files1)
+	}
+
+	// Test case 3: same build ids and files, different timestamps
+	nsh1 = generateSPDXNamespace(buildID1, timestamp1, files1)
+	nsh2 = generateSPDXNamespace(buildID1, timestamp2, files1)
+
+	if nsh1 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files1)
+	}
+
+	if nsh2 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp2, files1)
+	}
+
+	if nsh1 != nsh2 {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected same namespace hashes, but got different: %s and %s", buildID1, timestamp1, files1, buildID2, timestamp1, files1, nsh1, nsh2)
+	}
+
+	// Test case 4: same build ids and timestamps, different files
+	nsh1 = generateSPDXNamespace(buildID1, timestamp1, files1)
+	nsh2 = generateSPDXNamespace(buildID1, timestamp1, files2)
+
+	if nsh1 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files1)
+	}
+
+	if nsh2 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files2)
+	}
+
+	if nsh1 == nsh2 {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", buildID1, timestamp1, files1, buildID1, timestamp1, files2)
+	}
+
+	// Test case 5: empty build ids, same timestamps and different files
+	nsh1 = generateSPDXNamespace("", timestamp1, files1)
+	nsh2 = generateSPDXNamespace("", timestamp1, files2)
+
+	if nsh1 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp1, files1)
+	}
+
+	if nsh2 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp1, files2)
+	}
+
+	if nsh1 == nsh2 {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", "", timestamp1, files1, "", timestamp1, files2)
+	}
 }
 
-type packageTag struct {
-	name string
+func getCreationInfo(t *testing.T) *spdx.CreationInfo {
+	ci, err := builder2v2.BuildCreationInfoSection2_2("Organization", "Google LLC", nil)
+	if err != nil {
+		t.Errorf("Unable to get creation info: %v", err)
+		return nil
+	}
+	return ci
 }
 
-func (m packageTag) isMatch(line string) bool {
-	groups := spdxPackageTag.FindStringSubmatch(line)
-	if len(groups) != 2 {
+// validate returns an error if the Document is found to be invalid
+func validate(doc *spdx.Document) error {
+	if doc.SPDXVersion == "" {
+		return fmt.Errorf("SPDXVersion: got nothing, want spdx version")
+	}
+	if doc.DataLicense == "" {
+		return fmt.Errorf("DataLicense: got nothing, want Data License")
+	}
+	if doc.SPDXIdentifier == "" {
+		return fmt.Errorf("SPDXIdentifier: got nothing, want SPDX Identifier")
+	}
+	if doc.DocumentName == "" {
+		return fmt.Errorf("DocumentName: got nothing, want Document Name")
+	}
+	if c := fmt.Sprintf("%v", doc.CreationInfo.Creators[1].Creator); c != "Google LLC" {
+		return fmt.Errorf("Creator: got %v, want  'Google LLC'", c)
+	}
+	_, err := time.Parse(time.RFC3339, doc.CreationInfo.Created)
+	if err != nil {
+		return fmt.Errorf("Invalid time spec: %q: got error %q, want no error", doc.CreationInfo.Created, err)
+	}
+
+	for _, license := range doc.OtherLicenses {
+		if license.ExtractedText == "" {
+			return fmt.Errorf("License file: %q: got nothing, want license text", license.LicenseName)
+		}
+	}
+	return nil
+}
+
+// compareSpdxDocs deep-compares two spdx docs by going through the info section, packages, relationships and licenses
+func compareSpdxDocs(t *testing.T, actual, expected *spdx.Document) {
+
+	if actual == nil || expected == nil {
+		t.Errorf("SBOM: SPDX Doc is nil! Got %v: Expected %v", actual, expected)
+	}
+
+	if actual.DocumentName != expected.DocumentName {
+		t.Errorf("sbom: unexpected SPDX Document Name got %q, want %q", actual.DocumentName, expected.DocumentName)
+	}
+
+	if actual.SPDXVersion != expected.SPDXVersion {
+		t.Errorf("sbom: unexpected SPDX Version got %s, want %s", actual.SPDXVersion, expected.SPDXVersion)
+	}
+
+	if actual.DataLicense != expected.DataLicense {
+		t.Errorf("sbom: unexpected SPDX DataLicense got %s, want %s", actual.DataLicense, expected.DataLicense)
+	}
+
+	if actual.SPDXIdentifier != expected.SPDXIdentifier {
+		t.Errorf("sbom: unexpected SPDX Identified got %s, want %s", actual.SPDXIdentifier, expected.SPDXIdentifier)
+	}
+
+	if actual.DocumentNamespace != expected.DocumentNamespace {
+		t.Errorf("sbom: unexpected SPDX Document Namespace got %s, want %s", actual.DocumentNamespace, expected.DocumentNamespace)
+	}
+
+	// compare creation info
+	compareSpdxCreationInfo(t, actual.CreationInfo, expected.CreationInfo)
+
+	// compare packages
+	if len(actual.Packages) != len(expected.Packages) {
+		t.Errorf("SBOM: Number of Packages is different! Got %d: Expected %d", len(actual.Packages), len(expected.Packages))
+	}
+
+	for i, pkg := range actual.Packages {
+		if !compareSpdxPackages(t, i, pkg, expected.Packages[i]) {
+			break
+		}
+	}
+
+	// compare licenses
+	if len(actual.OtherLicenses) != len(expected.OtherLicenses) {
+		t.Errorf("SBOM: Number of Licenses in actual is different! Got %d: Expected %d", len(actual.OtherLicenses), len(expected.OtherLicenses))
+	}
+	for i, license := range actual.OtherLicenses {
+		if !compareLicenses(t, i, license, expected.OtherLicenses[i]) {
+			break
+		}
+	}
+
+	//compare Relationships
+	if len(actual.Relationships) != len(expected.Relationships) {
+		t.Errorf("SBOM: Number of Licenses in actual is different! Got %d: Expected %d", len(actual.Relationships), len(expected.Relationships))
+	}
+	for i, rl := range actual.Relationships {
+		if !compareRelationShips(t, i, rl, expected.Relationships[i]) {
+			break
+		}
+	}
+}
+
+func compareSpdxCreationInfo(t *testing.T, actual, expected *spdx.CreationInfo) {
+	if actual == nil || expected == nil {
+		t.Errorf("SBOM: Creation info is nil! Got %q: Expected %q", actual, expected)
+	}
+
+	if actual.LicenseListVersion != expected.LicenseListVersion {
+		t.Errorf("SBOM: Creation info license version Error! Got %s: Expected %s", actual.LicenseListVersion, expected.LicenseListVersion)
+	}
+
+	if len(actual.Creators) != len(expected.Creators) {
+		t.Errorf("SBOM: Creation info creators Error! Got %d: Expected %d", len(actual.Creators), len(expected.Creators))
+	}
+
+	for i, info := range actual.Creators {
+		if info != expected.Creators[i] {
+			t.Errorf("SBOM: Creation info creators Error! Got %q: Expected %q", info, expected.Creators[i])
+		}
+	}
+}
+
+func compareSpdxPackages(t *testing.T, i int, actual, expected *spdx.Package) bool {
+	if actual == nil || expected == nil {
+		t.Errorf("SBOM: Packages are nil at index %d! Got %v: Expected %v", i, actual, expected)
 		return false
 	}
-	return groups[1] == m.name
-}
-
-func (m packageTag) String() string {
-	return "##### Package: " + m.name
-}
-
-type packageName struct {
-	name string
-}
-
-func (m packageName) isMatch(line string) bool {
-	groups := spdxPackageNameTag.FindStringSubmatch(line)
-	if len(groups) != 2 {
+	if actual.PackageName != expected.PackageName {
+		t.Errorf("SBOM: Package name Error at index %d! Got %s: Expected %s", i, actual.PackageName, expected.PackageName)
 		return false
 	}
-	return groups[1] == replaceSlashes(m.name)
-}
 
-func (m packageName) String() string {
-	return "PackageName: " + replaceSlashes(m.name)
-}
-
-type spdxID struct {}
-
-func (m spdxID) isMatch(line string) bool {
-	return spdxIDTag.MatchString(line)
-}
-
-func (m spdxID) String() string {
-	return "SPDXID: SPDXRef-DOCUMENT"
-}
-
-type spdxPkgID struct {
-	name string
-}
-
-func (m spdxPkgID) isMatch(line string) bool {
-	groups := spdxPkgIDTag.FindStringSubmatch(line)
-	if len(groups) != 2 {
+	if actual.PackageVersion != expected.PackageVersion {
+		t.Errorf("SBOM: Package version Error at index %d! Got %s: Expected %s", i, actual.PackageVersion, expected.PackageVersion)
 		return false
 	}
-	return groups[1] == replaceSlashes(m.name)
-}
 
-func (m spdxPkgID) String() string {
-	return "SPDXID: SPDXRef-Package-" + replaceSlashes(m.name)
-}
-
-type spdxVersion struct{}
-
-func (m spdxVersion) isMatch(line string) bool {
-	return spdxVersionTag.MatchString(line)
-}
-
-func (m spdxVersion) String() string {
-	return "SPDXVersion: SPDX-2.2"
-}
-
-type spdxDataLicense struct{}
-
-func (m spdxDataLicense) isMatch(line string) bool {
-	return spdxDataLicenseTag.MatchString(line)
-}
-
-func (m spdxDataLicense) String() string {
-	return "DataLicense: CC0-1.0"
-}
-
-type spdxDocumentName struct {
-	name string
-}
-
-func (m spdxDocumentName) isMatch(line string) bool {
-	return spdxDocumentNameTag.MatchString(line)
-}
-
-func (m spdxDocumentName) String() string {
-	return "DocumentName: " + m.name
-}
-
-type spdxDocumentNameSpace struct {
-	name string
-}
-
-func (m spdxDocumentNameSpace) isMatch(line string) bool {
-	return spdxDocumentNameSpaceTag.MatchString(line)
-}
-
-func (m spdxDocumentNameSpace) String() string {
-	return "DocumentNameSpace: Android"
-}
-
-type spdxCreatorOrganization struct{}
-
-func (m spdxCreatorOrganization) isMatch(line string) bool {
-	return spdxCreatorOrganizationTag.MatchString(line)
-}
-
-func (m spdxCreatorOrganization) String() string {
-	return "Creator: Organization: Google LLC"
-}
-
-func fakeTime() time.Time {
-	return time.UnixMicro(0).UTC()
-}
-
-type spdxCreatedTime struct{}
-
-func (m spdxCreatedTime) isMatch(line string) bool {
-	return spdxCreatedTimeTag.MatchString(line)
-}
-
-func (m spdxCreatedTime) String() string {
-	return "Created: 1970-01-01T00:00:00Z"
-}
-
-type spdxPkgDownloadLocation struct {
-	name string
-}
-
-func (m spdxPkgDownloadLocation) isMatch(line string) bool {
-	return spdxPkgDownloadLocationTag.MatchString(line)
-}
-
-func (m spdxPkgDownloadLocation) String() string {
-	return "PackageDownloadLocation: " + m.name
-}
-
-type spdxPkgLicenseDeclared struct {
-	name string
-}
-
-func (m spdxPkgLicenseDeclared) isMatch(line string) bool {
-	groups := spdxPkgLicenseDeclaredTag.FindStringSubmatch(line)
-	if len(groups) != 2 {
+	if actual.PackageSPDXIdentifier != expected.PackageSPDXIdentifier {
+		t.Errorf("SBOM: Package identifier Error at index %d! Got %s: Expected %s", i, actual.PackageSPDXIdentifier, expected.PackageSPDXIdentifier)
 		return false
 	}
-	return groups[1] == replaceSlashes(m.name)
-}
 
-func (m spdxPkgLicenseDeclared) String() string {
-	return "PackageLicenseConcluded: LicenseRef-" + m.name
-}
-
-type spdxRelationship struct {
-	pkg1     string
-	pkg2     string
-	relation string
-}
-
-func (m spdxRelationship) isMatch(line string) bool {
-	groups := spdxRelationshipTag.FindStringSubmatch(line)
-	if len(groups) != 4 {
+	if actual.PackageDownloadLocation != expected.PackageDownloadLocation {
+		t.Errorf("SBOM: Package download location Error at index %d! Got %s: Expected %s", i, actual.PackageDownloadLocation, expected.PackageDownloadLocation)
 		return false
 	}
-	return groups[1] == replaceSlashes(m.pkg1) && groups[2] == m.relation && groups[3] == replaceSlashes(m.pkg2)
-}
 
-func (m spdxRelationship) String() string {
-	return "Relationship: SPDXRef-" + replaceSlashes(m.pkg1) + " " + m.relation + " SPDXRef-Package-" + replaceSlashes(m.pkg2)
-}
-
-type spdxLicense struct{}
-
-func (m spdxLicense) isMatch(line string) bool {
-	return spdxLicenseTag.MatchString(line)
-}
-
-func (m spdxLicense) String() string {
-	return "##### Non-standard license:"
-}
-
-type spdxLicenseID struct {
-	name string
-}
-
-func (m spdxLicenseID) isMatch(line string) bool {
-	groups := spdxLicenseIDTag.FindStringSubmatch(line)
-	if len(groups) != 2 {
+	if actual.PackageLicenseConcluded != expected.PackageLicenseConcluded {
+		t.Errorf("SBOM: Package license concluded Error at index %d! Got %s: Expected %s", i, actual.PackageLicenseConcluded, expected.PackageLicenseConcluded)
 		return false
 	}
-	return groups[1] == replaceSlashes(m.name)
+	return true
 }
 
-func (m spdxLicenseID) String() string {
-	return "LicenseID: LicenseRef-" + m.name
-}
-
-type spdxExtractedText struct {
-	name string
-}
-
-func (m spdxExtractedText) isMatch(line string) bool {
-	groups := spdxExtractedTextTag.FindStringSubmatch(line)
-	if len(groups) != 2 {
+func compareRelationShips(t *testing.T, i int, actual, expected *spdx.Relationship) bool {
+	if actual == nil || expected == nil {
+		t.Errorf("SBOM: Relationships is nil at index %d! Got %v: Expected %v", i, actual, expected)
 		return false
 	}
-	return groups[1] == replaceSlashes(m.name)
-}
 
-func (m spdxExtractedText) String() string {
-	return "ExtractedText: <text>" + m.name
-}
-
-type spdxExtractedClosingText struct{}
-
-func (m spdxExtractedClosingText) isMatch(line string) bool {
-	return spdxExtractedClosingTextTag.MatchString(line)
-}
-
-func (m spdxExtractedClosingText) String() string {
-	return "</text>"
-}
-
-type matcherList []matcher
-
-func (l matcherList) String() string {
-	var sb strings.Builder
-	for _, m := range l {
-		s := m.String()
-		fmt.Fprintf(&sb, "%s\n", s)
+	if actual.RefA != expected.RefA {
+		t.Errorf("SBOM: Relationship RefA Error at index %d! Got %s: Expected %s", i, actual.RefA, expected.RefA)
+		return false
 	}
-	return sb.String()
+
+	if actual.RefB != expected.RefB {
+		t.Errorf("SBOM: Relationship RefB Error at index %d! Got %s: Expected %s", i, actual.RefB, expected.RefB)
+		return false
+	}
+
+	if actual.Relationship != expected.Relationship {
+		t.Errorf("SBOM: Relationship type Error at index %d! Got %s: Expected %s", i, actual.Relationship, expected.Relationship)
+		return false
+	}
+	return true
+}
+
+func compareLicenses(t *testing.T, i int, actual, expected *spdx.OtherLicense) bool {
+	if actual == nil || expected == nil {
+		t.Errorf("SBOM: Licenses is nil at index %d! Got %v: Expected %v", i, actual, expected)
+		return false
+	}
+
+	if actual.LicenseName != expected.LicenseName {
+		t.Errorf("SBOM: License Name Error at index %d! Got %s: Expected %s", i, actual.LicenseName, expected.LicenseName)
+		return false
+	}
+
+	if actual.LicenseIdentifier != expected.LicenseIdentifier {
+		t.Errorf("SBOM: License Identifier Error at index %d! Got %s: Expected %s", i, actual.LicenseIdentifier, expected.LicenseIdentifier)
+		return false
+	}
+
+	if actual.ExtractedText != expected.ExtractedText {
+		t.Errorf("SBOM: License Extracted Text Error at index %d! Got: %q want: %q", i, actual.ExtractedText, expected.ExtractedText)
+		return false
+	}
+	return true
+}
+
+func fakeTime() string {
+	t := time.UnixMicro(0)
+	return t.UTC().Format("2006-01-02T15:04:05Z")
 }
diff --git a/tools/compliance/go.mod b/tools/compliance/go.mod
index 088915a..1928189 100644
--- a/tools/compliance/go.mod
+++ b/tools/compliance/go.mod
@@ -7,8 +7,11 @@
 require (
 	android/soong v0.0.0
 	github.com/google/blueprint v0.0.0
+	github.com/spdx/tools-golang v0.0.0
 )
 
+replace github.com/spdx/tools-golang v0.0.0 => ../../../../external/spdx-tools
+
 require golang.org/x/xerrors v0.0.0-20220609144429-65e65417b02f // indirect
 
 replace android/soong v0.0.0 => ../../../soong
diff --git a/tools/finalization/README.md b/tools/finalization/README.md
new file mode 100644
index 0000000..501f260
--- /dev/null
+++ b/tools/finalization/README.md
@@ -0,0 +1,22 @@
+# Finalization tools
+This folder contains automation and CI scripts for [finalizing](https://go/android-finalization) Android before release.
+
+## Automation:
+1. [Environment setup](./environment.sh). Set values for various finalization constants.
+2. [Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh). Prepare the branch for SDK release. SDK contains Android Java APIs and other stable APIs. Commonly referred as a 1st step.
+3. [Finalize Android](./finalize-sdk-rel.sh). Mark branch as "REL", i.e. prepares for Android release. Any signed build containing these changes will be considered an official Android Release. Referred as a 2nd finalization step.
+4. [Finalize SDK and submit](./step-1.sh). Do [Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) step, create CLs, organize them into a topic and send to Gerrit.
+  a. [Update SDK and submit](./update-step-1.sh). Same as above, but updates the existing CLs.
+5. [Finalize Android and submit](./step-2.sh). Do [Finalize Android](./finalize-sdk-rel.sh) step, create CLs, organize them into a topic and send to Gerrit.
+  a. [Update Android and submit](./update-step-2.sh). Same as above, but updates the existing CLs.
+
+## CI:
+Performed in build targets in Finalization branches.
+1. [Finalization Step 1 for Main, git_main-fina-1-release](https://android-build.googleplex.com/builds/branches/git_main-fina-1-release/grid). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh).
+2. [Finalization Step 1 for UDC, git_udc-fina-1-release](https://android-build.googleplex.com/builds/branches/git_udc-fina-1-release/grid). Same but for udc-dev.
+3. [Finalization Step 2 for Main, git_main-fina-2-release](https://android-build.googleplex.com/builds/branches/git_main-fina-2-release/grid). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) and [2nd step/Finalize Android](./finalize-sdk-rel.sh). Use [local finalization](./localonly-steps.sh) to build and copy presubmits.
+4. [Finalization Step 2 for UDC, git_udc-fina-2-release](https://android-build.googleplex.com/builds/branches/git_udc-fina-2-release/grid). Same but for udc-dev.
+5. [Local finalization steps](./localonly-steps.sh) are done only during local testing or in the CI lab. Normally these steps use artifacts from other builds.
+
+## Utility:
+[Full cleanup](./cleanup.sh). Remove all local changes and switch each project into a head-less state. This is the best state to sync/rebase/finalize the branch.
diff --git a/tools/finalization/build-step-1-and-2.sh b/tools/finalization/build-step-1-and-2.sh
index eaaf0cd..84e2782 100755
--- a/tools/finalization/build-step-1-and-2.sh
+++ b/tools/finalization/build-step-1-and-2.sh
@@ -6,17 +6,18 @@
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
-    # default target to modify tree and build SDK
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+    if [ "$FINAL_STATE" = "unfinalized" ] ; then
+        # SDK codename -> int
+        source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+    fi;
 
-    # SDK codename -> int
-    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+    if [ "$FINAL_STATE" = "unfinalized" ] || [ "$FINAL_STATE" = "sdk" ] ; then
+        # ADB, Platform/Mainline SDKs build and move to prebuilts
+        source $top/build/make/tools/finalization/localonly-steps.sh
 
-    # Platform/Mainline SDKs build and move to prebuilts
-    source $top/build/make/tools/finalization/localonly-finalize-mainline-sdk.sh
-
-    # REL
-    source $top/build/make/tools/finalization/finalize-sdk-rel.sh
+        # REL
+        source $top/build/make/tools/finalization/finalize-sdk-rel.sh
+    fi;
 }
 
 finalize_main_step12
diff --git a/tools/finalization/build-step-1.sh b/tools/finalization/build-step-1.sh
index edf497e..3d5eadb 100755
--- a/tools/finalization/build-step-1.sh
+++ b/tools/finalization/build-step-1.sh
@@ -6,11 +6,10 @@
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
-    # default target to modify tree and build SDK
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    # Build finalization artifacts.
-    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+    if [ "$FINAL_STATE" = "unfinalized" ] ; then
+        # Build finalization artifacts.
+        source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+    fi;
 }
 
 finalize_main_step1
diff --git a/tools/finalization/environment.sh b/tools/finalization/environment.sh
index 983e19d..9714ac4 100755
--- a/tools/finalization/environment.sh
+++ b/tools/finalization/environment.sh
@@ -2,14 +2,23 @@
 
 set -ex
 
-export FINAL_BUG_ID='0'
+export FINAL_BUG_ID='0' # CI only
 
-export FINAL_PLATFORM_CODENAME='UpsideDownCake'
+export FINAL_PLATFORM_CODENAME='VanillaIceCream'
 export CURRENT_PLATFORM_CODENAME='VanillaIceCream'
-export FINAL_PLATFORM_CODENAME_JAVA='UPSIDE_DOWN_CAKE'
-export FINAL_PLATFORM_SDK_VERSION='34'
-export FINAL_PLATFORM_VERSION='14'
+export FINAL_PLATFORM_CODENAME_JAVA='VANILLA_ICE_CREAM'
+export FINAL_PLATFORM_VERSION='15'
 
-export FINAL_BUILD_PREFIX='UP1A'
+# Set arbitrary large values for CI.
+# SDK_VERSION needs to be <61 (lint/libs/lint-api/src/main/java/com/android/tools/lint/detector/api/ApiConstraint.kt)
+# There are multiple places where we rely on next SDK version to be previous + 1, e.g. RESOURCES_SDK_INT.
+# We might or might not fix this in future, but for now let's keep it +1.
+export FINAL_PLATFORM_SDK_VERSION='35'
+# Feel free to randomize once in a while to detect buggy version detection code.
+export FINAL_MAINLINE_EXTENSION='58'
 
-export FINAL_MAINLINE_EXTENSION='6'
\ No newline at end of file
+# Options:
+# 'unfinalized' - branch is in development state,
+# 'sdk' - SDK/API is finalized
+# 'rel' - branch is finalized, switched to REL
+export FINAL_STATE='unfinalized'
diff --git a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
index e23e585..fa33986 100755
--- a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+++ b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
@@ -4,7 +4,15 @@
 
 function apply_droidstubs_hack() {
     if ! grep -q 'STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD' "$top/build/soong/java/droidstubs.go" ; then
-        git -C "$top/build/soong" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
+        local build_soong_git_root="$(readlink -f $top/build/soong)"
+        git -C "$build_soong_git_root" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
+    fi
+}
+
+function apply_resources_sdk_int_fix() {
+    if ! grep -q 'public static final int RESOURCES_SDK_INT = SDK_INT;' "$top/frameworks/base/core/java/android/os/Build.java" ; then
+        local base_git_root="$(readlink -f $top/frameworks/base)"
+        git -C "$base_git_root" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.apply_resource_sdk_int.diff
     fi
 }
 
@@ -69,7 +77,7 @@
     local SDK_VERSION="public static final int $FINAL_PLATFORM_CODENAME_JAVA = $FINAL_PLATFORM_SDK_VERSION;"
 
     # default target to modify tree and build SDK
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug DIST_DIR=out/dist"
 
     # The full process can be found at (INTERNAL) go/android-sdk-finalization.
 
@@ -86,9 +94,7 @@
     AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api create_reference_dumps
 
     # Generate ABI dumps
-    ANDROID_BUILD_TOP="$top" \
-        out/host/linux-x86/bin/create_reference_dumps \
-        -p aosp_arm64 --build-variant user
+    ANDROID_BUILD_TOP="$top" out/host/linux-x86/bin/create_reference_dumps
 
     echo "NOTE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF (until 'DONE')"
     # Update new versions of files. See update-vndk-list.sh (which requires envsetup.sh)
@@ -101,6 +107,12 @@
     # frameworks/libs/modules-utils
     finalize_modules_utils
 
+    # development/sdk
+    local platform_source="$top/development/sdk/platform_source.prop_template"
+    sed -i -e 's/Pkg\.Revision.*/Pkg\.Revision=1/g' $platform_source
+    local build_tools_source="$top/development/sdk/build_tools_source.prop_template"
+    sed -i -e 's/Pkg\.Revision.*/Pkg\.Revision=${PLATFORM_SDK_VERSION}.0.0/g' $build_tools_source
+
     # build/make
     local version_defaults="$top/build/make/core/version_defaults.mk"
     sed -i -e "s/PLATFORM_SDK_VERSION := .*/PLATFORM_SDK_VERSION := ${FINAL_PLATFORM_SDK_VERSION}/g" $version_defaults
@@ -108,8 +120,11 @@
     sed -i -e "s/sepolicy_major_vers := .*/sepolicy_major_vers := ${FINAL_PLATFORM_SDK_VERSION}/g" "$top/build/make/core/config.mk"
     cp "$top/build/make/target/product/gsi/current.txt" "$top/build/make/target/product/gsi/$FINAL_PLATFORM_SDK_VERSION.txt"
 
-    # build/soong
-    sed -i -e "/:.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\\t\t\t\"${FINAL_PLATFORM_CODENAME}\":     ${FINAL_PLATFORM_SDK_VERSION}," "$top/build/soong/android/api_levels.go"
+    # build/bazel
+    local codename_version="\"${FINAL_PLATFORM_CODENAME}\": ${FINAL_PLATFORM_SDK_VERSION}"
+    if ! grep -q "$codename_version" "$top/build/bazel/rules/common/api_constants.bzl" ; then
+        sed -i -e "/:.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\    $codename_version," "$top/build/bazel/rules/common/api_constants.bzl"
+    fi
 
     # cts
     echo ${FINAL_PLATFORM_VERSION} > "$top/cts/tests/tests/os/assets/platform_releases.txt"
@@ -129,11 +144,17 @@
 
     # frameworks/base
     sed -i "s%$SDK_CODENAME%$SDK_VERSION%g" "$top/frameworks/base/core/java/android/os/Build.java"
+    apply_resources_sdk_int_fix
     sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\    SDK_${FINAL_PLATFORM_CODENAME_JAVA} = ${FINAL_PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt/SdkConstants.h"
     sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\  SDK_${FINAL_PLATFORM_CODENAME_JAVA} = ${FINAL_PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt2/SdkConstants.h"
 
     # Bump Mainline SDK extension version.
+    local SDKEXT="packages/modules/SdkExtensions/"
     "$top/packages/modules/SdkExtensions/gen_sdk/bump_sdk.sh" ${FINAL_MAINLINE_EXTENSION}
+    # Leave the last commit as a set of modified files.
+    # The code to create a finalization topic will pick it up later.
+    git -C ${SDKEXT} reset HEAD~1
+
     local version_defaults="$top/build/make/core/version_defaults.mk"
     sed -i -e "s/PLATFORM_SDK_EXTENSION_VERSION := .*/PLATFORM_SDK_EXTENSION_VERSION := ${FINAL_MAINLINE_EXTENSION}/g" $version_defaults
 
diff --git a/tools/finalization/finalize-sdk-rel.sh b/tools/finalization/finalize-sdk-rel.sh
index 56f3bc3..62e5ee5 100755
--- a/tools/finalization/finalize-sdk-rel.sh
+++ b/tools/finalization/finalize-sdk-rel.sh
@@ -8,6 +8,12 @@
     fi
 }
 
+function revert_resources_sdk_int_fix() {
+    if grep -q 'public static final int RESOURCES_SDK_INT = SDK_INT;' "$top/frameworks/base/core/java/android/os/Build.java" ; then
+        git -C "$top/frameworks/base" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.revert_resource_sdk_int.diff
+    fi
+}
+
 function apply_prerelease_sdk_hack() {
     if ! grep -q 'STOPSHIP: hack for the pre-release SDK' "$top/frameworks/base/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java" ; then
         git -C "$top/frameworks/base" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.apply_hack.diff
@@ -18,21 +24,18 @@
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
-    # default target to modify tree and build SDK
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
     # revert droidstubs hack now we are switching to REL
     revert_droidstubs_hack
 
     # let the apps built with pre-release SDK parse
     apply_prerelease_sdk_hack
 
-    # adb keys
-    $m adb
-    LOGNAME=android-eng HOSTNAME=google.com "$top/out/host/linux-x86/bin/adb" keygen "$top/vendor/google/security/adb/${FINAL_PLATFORM_VERSION}.adb_key"
+    # in REL mode, resources would correctly set the resources_sdk_int, no fix required
+    revert_resources_sdk_int_fix
 
     # build/make/core/version_defaults.mk
-    sed -i -e "s/PLATFORM_VERSION_CODENAME.${FINAL_BUILD_PREFIX} := .*/PLATFORM_VERSION_CODENAME.${FINAL_BUILD_PREFIX} := REL/g" "$top/build/make/core/version_defaults.mk"
+    # Mark all versions "released".
+    sed -i 's/\(PLATFORM_VERSION_CODENAME\.[^[:space:]]*\) := [^[:space:]]*/\1 := REL/g' "$top/build/make/core/version_defaults.mk"
 
     # cts
     echo "$FINAL_PLATFORM_VERSION" > "$top/cts/tests/tests/os/assets/platform_versions.txt"
@@ -47,13 +50,17 @@
     cp -r "$top/system/sepolicy/private/" "$top/system/sepolicy/prebuilts/api/${FINAL_PLATFORM_SDK_VERSION}.0/"
 
     # prebuilts/abi-dumps/ndk
-    mv "$top/prebuilts/abi-dumps/ndk/current" "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION"
-
-    # prebuilts/abi-dumps/vndk
-    mv "$top/prebuilts/abi-dumps/vndk/$CURRENT_PLATFORM_CODENAME" "$top/prebuilts/abi-dumps/vndk/$FINAL_PLATFORM_SDK_VERSION"
+    mkdir -p "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION"
+    cp -r "$top/prebuilts/abi-dumps/ndk/current/64/" "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION/"
 
     # prebuilts/abi-dumps/platform
-    mv "$top/prebuilts/abi-dumps/platform/current" "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION"
+    mkdir -p "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION"
+    cp -r "$top/prebuilts/abi-dumps/platform/current/64/" "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION/"
+
+    if [ "$FINAL_STATE" != "sdk" ] || [ "$FINAL_PLATFORM_CODENAME" == "$CURRENT_PLATFORM_CODENAME" ] ; then
+        # prebuilts/abi-dumps/vndk
+        mv "$top/prebuilts/abi-dumps/vndk/$CURRENT_PLATFORM_CODENAME" "$top/prebuilts/abi-dumps/vndk/$FINAL_PLATFORM_SDK_VERSION"
+    fi;
 }
 
 finalize_sdk_rel
diff --git a/tools/finalization/frameworks_base.apply_resource_sdk_int.diff b/tools/finalization/frameworks_base.apply_resource_sdk_int.diff
new file mode 100644
index 0000000..f0576d0
--- /dev/null
+++ b/tools/finalization/frameworks_base.apply_resource_sdk_int.diff
@@ -0,0 +1,24 @@
+From cdb47fc90b8d6860ec1dc5efada1f9ccd471618b Mon Sep 17 00:00:00 2001
+From: Alex Buynytskyy <alexbuy@google.com>
+Date: Tue, 11 Apr 2023 22:12:44 +0000
+Subject: [PATCH] Don't force +1 for resource resolution.
+
+Bug: 277674088
+Fixes: 277674088
+Test: boots, no crashes
+Change-Id: I17e743a0f1cf6f98fddd40c358dea5a8b9cc7723
+---
+
+diff --git a/core/java/android/os/Build.java b/core/java/android/os/Build.java
+index eb47170..4d3e92b 100755
+--- a/core/java/android/os/Build.java
++++ b/core/java/android/os/Build.java
+@@ -493,7 +493,7 @@
+          * @hide
+          */
+         @TestApi
+-        public static final int RESOURCES_SDK_INT = SDK_INT + ACTIVE_CODENAMES.length;
++        public static final int RESOURCES_SDK_INT = SDK_INT;
+ 
+         /**
+          * The current lowest supported value of app target SDK. Applications targeting
diff --git a/tools/finalization/frameworks_base.revert_resource_sdk_int.diff b/tools/finalization/frameworks_base.revert_resource_sdk_int.diff
new file mode 100644
index 0000000..2ade499
--- /dev/null
+++ b/tools/finalization/frameworks_base.revert_resource_sdk_int.diff
@@ -0,0 +1,27 @@
+From c7e460bb19071d867cd7ca04282ce42694f4f358 Mon Sep 17 00:00:00 2001
+From: Alex Buynytskyy <alexbuy@google.com>
+Date: Wed, 12 Apr 2023 01:06:26 +0000
+Subject: [PATCH] Revert "Don't force +1 for resource resolution."
+
+It's not required for master.
+
+This reverts commit f1cb683988f81579a76ddbf9993848a4a06dd28c.
+
+Bug: 277674088
+Test: boots, no crashes
+Change-Id: Ia1692548f26496fdc6f1e4f0557213c7996d6823
+---
+
+diff --git a/core/java/android/os/Build.java b/core/java/android/os/Build.java
+index 4d3e92b..eb47170 100755
+--- a/core/java/android/os/Build.java
++++ b/core/java/android/os/Build.java
+@@ -493,7 +493,7 @@
+          * @hide
+          */
+         @TestApi
+-        public static final int RESOURCES_SDK_INT = SDK_INT;
++        public static final int RESOURCES_SDK_INT = SDK_INT + ACTIVE_CODENAMES.length;
+ 
+         /**
+          * The current lowest supported value of app target SDK. Applications targeting
diff --git a/tools/finalization/localonly-finalize-mainline-sdk.sh b/tools/finalization/localonly-finalize-mainline-sdk.sh
deleted file mode 100755
index 104b6ac..0000000
--- a/tools/finalization/localonly-finalize-mainline-sdk.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-
-set -ex
-
-function finalize_locally_mainline_sdk() {
-    local top="$(dirname "$0")"/../../../..
-    source $top/build/make/tools/finalization/environment.sh
-
-    # Build Platform SDKs.
-    $top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=sdk TARGET_BUILD_VARIANT=userdebug sdk dist sdk_repo
-
-    # Build Modules SDKs.
-    TARGET_BUILD_VARIANT=userdebug UNBUNDLED_BUILD_SDKS_FROM_SOURCE=true "$top/vendor/google/build/mainline_modules_sdks.sh"
-
-    # Update prebuilts.
-    # "$top/prebuilts/build-tools/path/linux-x86/python3" "$top/packages/modules/common/tools/finalize_sdk.py" -l -b 0 -f ${FINAL_MAINLINE_EXTENSION} -r '' 0
-    "$top/prebuilts/build-tools/path/linux-x86/python3" "$top/prebuilts/sdk/update_prebuilts.py" --local_mode -f ${FINAL_PLATFORM_SDK_VERSION} -e ${FINAL_MAINLINE_EXTENSION} --bug 1 1
-}
-
-finalize_locally_mainline_sdk
-
diff --git a/tools/finalization/localonly-steps.sh b/tools/finalization/localonly-steps.sh
new file mode 100755
index 0000000..7318ca1
--- /dev/null
+++ b/tools/finalization/localonly-steps.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_locally() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    # default target to modify tree and build SDK
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug DIST_DIR=out/dist"
+
+    # adb keys
+    $m adb
+    LOGNAME=android-eng HOSTNAME=google.com "$top/out/host/linux-x86/bin/adb" keygen "$top/vendor/google/security/adb/${FINAL_PLATFORM_VERSION}.adb_key"
+
+    # Build Platform SDKs.
+    $top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=sdk TARGET_BUILD_VARIANT=userdebug sdk dist sdk_repo DIST_DIR=out/dist
+
+    # Build Modules SDKs.
+    TARGET_BUILD_VARIANT=userdebug UNBUNDLED_BUILD_SDKS_FROM_SOURCE=true DIST_DIR=out/dist "$top/vendor/google/build/mainline_modules_sdks.sh" --build-release=latest
+
+    # Update prebuilts.
+    "$top/prebuilts/build-tools/path/linux-x86/python3" -W ignore::DeprecationWarning "$top/prebuilts/sdk/update_prebuilts.py" --local_mode -f ${FINAL_PLATFORM_SDK_VERSION} -e ${FINAL_MAINLINE_EXTENSION} --bug 1 1
+}
+
+finalize_locally
diff --git a/tools/finalization/step-1.sh b/tools/finalization/step-1.sh
index cf21e45..0dd4b3a 100755
--- a/tools/finalization/step-1.sh
+++ b/tools/finalization/step-1.sh
@@ -9,7 +9,7 @@
         if [[ $(git status --short) ]]; then
             repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization" ;
             git add -A . ;
-            git commit -m "$FINAL_PLATFORM_CODENAME is now $FINAL_PLATFORM_SDK_VERSION" \
+            git commit -m "$FINAL_PLATFORM_CODENAME is now $FINAL_PLATFORM_SDK_VERSION and extension version $FINAL_MAINLINE_EXTENSION" \
                        -m "Ignore-AOSP-First: $FINAL_PLATFORM_CODENAME Finalization
 Bug: $FINAL_BUG_ID
 Test: build";
diff --git a/tools/finalization/update-step-1.sh b/tools/finalization/update-step-1.sh
new file mode 100755
index 0000000..b469988
--- /dev/null
+++ b/tools/finalization/update-step-1.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# Script to perform a 1st step of Android Finalization: API/SDK finalization, update CLs and upload to Gerrit.
+
+# WIP, does not work yet
+exit 10
+
+set -ex
+
+function update_step_1_changes() {
+    set +e
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            git stash -u ;
+            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization" ;
+            git stash pop ;
+            git add -A . ;
+            git commit --amend --no-edit ;
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function update_step_1_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # vndk etc finalization
+    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+
+    # update existing CLs and upload to gerrit
+    update_step_1_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+update_step_1_main
diff --git a/tools/finalization/update-step-2.sh b/tools/finalization/update-step-2.sh
new file mode 100755
index 0000000..d2b8592
--- /dev/null
+++ b/tools/finalization/update-step-2.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Script to perform a 2nd step of Android Finalization: REL finalization, create CLs and upload to Gerrit.
+
+# WIP, does not work yet
+exit 10
+
+set -ex
+
+function update_step_2_changes() {
+    set +e
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            git stash -u ;
+            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization-Rel" ;
+            git stash pop ;
+            git add -A . ;
+            git commit --amend --no-edit ;
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function update_step_2_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # prebuilts etc
+    source $top/build/make/tools/finalization/finalize-sdk-rel.sh
+
+    # move all changes to finalization branch/topic and upload to gerrit
+    update_step_2_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+update_step_2_main
diff --git a/tools/find_static_candidates.py b/tools/find_static_candidates.py
new file mode 100644
index 0000000..7511b36
--- /dev/null
+++ b/tools/find_static_candidates.py
@@ -0,0 +1,232 @@
+#!/usr/bin/env python3
+
+"""Tool to find static libraries that may need to be shared libraries and shared libraries that may need to be static libraries.
+
+This tool only looks at the module-info.json for the current target.
+
+Example of "class" types for each of the modules in module-info.json
+  "EXECUTABLES": 2307,
+  "ETC": 9094,
+  "NATIVE_TESTS": 10461,
+  "APPS": 2885,
+  "JAVA_LIBRARIES": 5205,
+  "EXECUTABLES/JAVA_LIBRARIES": 119,
+  "FAKE": 553,
+  "SHARED_LIBRARIES/STATIC_LIBRARIES": 7591,
+  "STATIC_LIBRARIES": 11535,
+  "SHARED_LIBRARIES": 10852,
+  "HEADER_LIBRARIES": 1897,
+  "DYLIB_LIBRARIES": 1262,
+  "RLIB_LIBRARIES": 3413,
+  "ROBOLECTRIC": 39,
+  "PACKAGING": 5,
+  "PROC_MACRO_LIBRARIES": 36,
+  "RENDERSCRIPT_BITCODE": 17,
+  "DYLIB_LIBRARIES/RLIB_LIBRARIES": 8,
+  "ETC/FAKE": 1
+
+None of the "SHARED_LIBRARIES/STATIC_LIBRARIES" are double counted in the
+modules with one class
+RLIB/
+
+All of these classes have shared_libs and/or static_libs
+    "EXECUTABLES",
+    "SHARED_LIBRARIES",
+    "STATIC_LIBRARIES",
+    "SHARED_LIBRARIES/STATIC_LIBRARIES", # cc_library
+    "HEADER_LIBRARIES",
+    "NATIVE_TESTS", # test modules
+    "DYLIB_LIBRARIES", # rust
+    "RLIB_LIBRARIES", # rust
+    "ETC", # rust_bindgen
+"""
+
+from collections import defaultdict
+
+import json, os, argparse
+
+ANDROID_PRODUCT_OUT = os.environ.get("ANDROID_PRODUCT_OUT")
+# If a shared library is used less than MAX_SHARED_INCLUSIONS times in a target,
+# then it will likely save memory by changing it to a static library
+# This move will also use less storage
+MAX_SHARED_INCLUSIONS = 2
+# If a static library is used more than MIN_STATIC_INCLUSIONS times in a target,
+# then it will likely save memory by changing it to a shared library
+# This move will also likely use less storage
+MIN_STATIC_INCLUSIONS = 3
+
+
+def parse_args():
+  parser = argparse.ArgumentParser(
+      description=(
+          "Parse module-info.json and display information about static and"
+          " shared library dependencies."
+      )
+  )
+  parser.add_argument(
+      "--module", dest="module", help="Print the info for the module."
+  )
+  parser.add_argument(
+      "--shared",
+      dest="print_shared",
+      action=argparse.BooleanOptionalAction,
+      help=(
+          "Print the list of libraries that are shared_libs for fewer than {}"
+          " modules.".format(MAX_SHARED_INCLUSIONS)
+      ),
+  )
+  parser.add_argument(
+      "--static",
+      dest="print_static",
+      action=argparse.BooleanOptionalAction,
+      help=(
+          "Print the list of libraries that are static_libs for more than {}"
+          " modules.".format(MIN_STATIC_INCLUSIONS)
+      ),
+  )
+  parser.add_argument(
+      "--recursive",
+      dest="recursive",
+      action=argparse.BooleanOptionalAction,
+      default=True,
+      help=(
+          "Gather all dependencies of EXECUTABLES recursively before calculating"
+          " the stats. This eliminates duplicates from multiple libraries"
+          " including the same dependencies in a single binary."
+      ),
+  )
+  parser.add_argument(
+      "--both",
+      dest="both",
+      action=argparse.BooleanOptionalAction,
+      default=False,
+      help=(
+          "Print a list of libraries that are including libraries as both"
+          " static and shared"
+      ),
+  )
+  return parser.parse_args()
+
+
+class TransitiveHelper:
+
+  def __init__(self):
+    # keep a list of already expanded libraries so we don't end up in a cycle
+    self.visited = defaultdict(lambda: defaultdict(set))
+
+  # module is an object from the module-info dictionary
+  # module_info is the dictionary from module-info.json
+  # modify the module's shared_libs and static_libs with all of the transient
+  # dependencies required from all of the explicit dependencies
+  def flattenDeps(self, module, module_info):
+    libs_snapshot = dict(shared_libs = set(module["shared_libs"]), static_libs = set(module["static_libs"]))
+
+    for lib_class in ["shared_libs", "static_libs"]:
+      for lib in libs_snapshot[lib_class]:
+        if not lib or lib not in module_info:
+          continue
+        if lib in self.visited:
+          module[lib_class].update(self.visited[lib][lib_class])
+        else:
+          res = self.flattenDeps(module_info[lib], module_info)
+          module[lib_class].update(res[lib_class])
+          self.visited[lib][lib_class].update(res[lib_class])
+
+    return module
+
+def main():
+  module_info = json.load(open(ANDROID_PRODUCT_OUT + "/module-info.json"))
+  # turn all of the static_libs and shared_libs lists into sets to make them
+  # easier to update
+  for _, module in module_info.items():
+    module["shared_libs"] = set(module["shared_libs"])
+    module["static_libs"] = set(module["static_libs"])
+
+  args = parse_args()
+
+  if args.module:
+    if args.module not in module_info:
+      print("Module {} does not exist".format(args.module))
+      exit(1)
+
+  includedStatically = defaultdict(set)
+  includedSharedly = defaultdict(set)
+  includedBothly = defaultdict(set)
+  transitive = TransitiveHelper()
+  for name, module in module_info.items():
+    if args.recursive:
+      # in this recursive mode we only want to see what is included by the executables
+      if "EXECUTABLES" not in module["class"]:
+        continue
+      module = transitive.flattenDeps(module, module_info)
+      # filter out fuzzers by their dependency on clang
+      if "libclang_rt.fuzzer" in module["static_libs"]:
+        continue
+    else:
+      if "NATIVE_TESTS" in module["class"]:
+        # We don't care about how tests are including libraries
+        continue
+
+    # count all of the shared and static libs included in this module
+    for lib in module["shared_libs"]:
+      includedSharedly[lib].add(name)
+    for lib in module["static_libs"]:
+      includedStatically[lib].add(name)
+
+    intersection = set(module["shared_libs"]).intersection(
+        module["static_libs"]
+    )
+    if intersection:
+      includedBothly[name] = intersection
+
+  if args.print_shared:
+    print(
+        "Shared libraries that are included by fewer than {} modules on a"
+        " device:".format(MAX_SHARED_INCLUSIONS)
+    )
+    for name, libs in includedSharedly.items():
+      if len(libs) < MAX_SHARED_INCLUSIONS:
+        print("{}: {} included by: {}".format(name, len(libs), libs))
+
+  if args.print_static:
+    print(
+        "Libraries that are included statically by more than {} modules on a"
+        " device:".format(MIN_STATIC_INCLUSIONS)
+    )
+    for name, libs in includedStatically.items():
+      if len(libs) > MIN_STATIC_INCLUSIONS:
+        print("{}: {} included by: {}".format(name, len(libs), libs))
+
+  if args.both:
+    allIncludedBothly = set()
+    for name, libs in includedBothly.items():
+      allIncludedBothly.update(libs)
+
+    print(
+        "List of libraries used both statically and shared in the same"
+        " processes:\n {}\n\n".format("\n".join(sorted(allIncludedBothly)))
+    )
+    print(
+        "List of libraries used both statically and shared in any processes:\n {}".format("\n".join(sorted(includedStatically.keys() & includedSharedly.keys()))))
+
+  if args.module:
+    print(json.dumps(module_info[args.module], default=list, indent=2))
+    print(
+        "{} is included in shared_libs {} times by these modules: {}".format(
+            args.module, len(includedSharedly[args.module]),
+            includedSharedly[args.module]
+        )
+    )
+    print(
+        "{} is included in static_libs {} times by these modules: {}".format(
+            args.module, len(includedStatically[args.module]),
+            includedStatically[args.module]
+        )
+    )
+    print("Shared libs included by this module that are used in fewer than {} processes:\n{}".format(
+        MAX_SHARED_INCLUSIONS, [x for x in module_info[args.module]["shared_libs"] if len(includedSharedly[x]) < MAX_SHARED_INCLUSIONS]))
+
+
+
+if __name__ == "__main__":
+  main()
diff --git a/tools/list_files.py b/tools/list_files.py
new file mode 100644
index 0000000..3afa81f
--- /dev/null
+++ b/tools/list_files.py
@@ -0,0 +1,102 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import List
+from glob import glob
+from pathlib import Path
+from os.path import join, relpath
+import argparse
+
+class FileLister:
+    def __init__(self, args) -> None:
+        self.out_file = args.out_file
+
+        self.folder_dir = args.dir
+        self.extensions = [e if e.startswith(".") else "." + e for e in args.extensions]
+        self.root = args.root
+        self.files_list = list()
+
+    def get_files(self) -> None:
+        """Get all files directory in the input directory including the files in the subdirectories
+
+        Recursively finds all files in the input directory.
+        Sets files_list to a list of file path strings,
+        which do not include directories but only files.
+        List is sorted in alphabetical order of the file directories.
+
+        Uses:
+            self.folder_dir: Directory to get the files from. String.
+
+        Raises:
+            FileNotFoundError: An error occurred accessing the non-existing directory
+        """
+
+        if not dir_exists(self.folder_dir):
+            raise FileNotFoundError(f"Directory {self.folder_dir} does not exist")
+
+        if self.folder_dir[:-2] != "**":
+            self.folder_dir = join(self.folder_dir, "**")
+
+        self.files_list = list()
+        for file in sorted(glob(self.folder_dir, recursive=True)):
+            if Path(file).is_file():
+                if self.root:
+                    file = join(self.root, relpath(file, self.folder_dir[:-2]))
+                self.files_list.append(file)
+
+
+    def list(self) -> None:
+        self.get_files()
+        self.files_list = [f for f in self.files_list if not self.extensions or Path(f).suffix in self.extensions]
+        self.write()
+
+    def write(self) -> None:
+        if self.out_file == "":
+            pprint(self.files_list)
+        else:
+            write_lines(self.out_file, self.files_list)
+
+###
+# Helper functions
+###
+def pprint(l: List[str]) -> None:
+    for line in l:
+        print(line)
+
+def dir_exists(dir: str) -> bool:
+    return Path(dir).exists()
+
+def write_lines(out_file: str, lines: List[str]) -> None:
+    with open(out_file, "w+") as f:
+        f.writelines(line + '\n' for line in lines)
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser()
+    parser.add_argument('dir', action='store', type=str,
+                        help="directory to list all subdirectory files")
+    parser.add_argument('--out', dest='out_file',
+                        action='store', default="", type=str,
+                        help="optional directory to write subdirectory files. If not set, will print to console")
+    parser.add_argument('--root', dest='root',
+                        action='store', default="", type=str,
+                        help="optional directory to replace the root directories of output.")
+    parser.add_argument('--extensions', nargs='*', default=list(), dest='extensions',
+                        help="Extensions to include in the output. If not set, all files are included")
+
+    args = parser.parse_args()
+
+    file_lister = FileLister(args)
+    file_lister.list()
diff --git a/tools/post_process_props.py b/tools/post_process_props.py
index 38d17a8..31a460d 100755
--- a/tools/post_process_props.py
+++ b/tools/post_process_props.py
@@ -43,7 +43,7 @@
   """Validate GRF properties if exist.
 
   If ro.board.first_api_level is defined, check if its value is valid for the
-  sdk version.
+  sdk version. This is only for the release version.
   Also, validate the value of ro.board.api_level if defined.
 
   Returns:
@@ -51,6 +51,7 @@
   """
   grf_api_level = prop_list.get_value("ro.board.first_api_level")
   board_api_level = prop_list.get_value("ro.board.api_level")
+  platform_version_codename = prop_list.get_value("ro.build.version.codename")
 
   if not grf_api_level:
     if board_api_level:
@@ -61,6 +62,18 @@
     return True
 
   grf_api_level = int(grf_api_level)
+  if board_api_level:
+    board_api_level = int(board_api_level)
+    if board_api_level < grf_api_level:
+      sys.stderr.write("error: ro.board.api_level(%d) must be greater than "
+                       "ro.board.first_api_level(%d)\n"
+                       % (board_api_level, grf_api_level))
+      return False
+
+  # skip sdk version validation for dev-stage non-REL devices
+  if platform_version_codename != "REL":
+    return True
+
   if grf_api_level > sdk_version:
     sys.stderr.write("error: ro.board.first_api_level(%d) must be less than "
                      "or equal to ro.build.version.sdk(%d)\n"
@@ -68,12 +81,10 @@
     return False
 
   if board_api_level:
-    board_api_level = int(board_api_level)
-    if board_api_level < grf_api_level or board_api_level > sdk_version:
-      sys.stderr.write("error: ro.board.api_level(%d) must be neither less "
-                       "than ro.board.first_api_level(%d) nor greater than "
-                       "ro.build.version.sdk(%d)\n"
-                       % (board_api_level, grf_api_level, sdk_version))
+    if board_api_level > sdk_version:
+      sys.stderr.write("error: ro.board.api_level(%d) must be less than or "
+                       "equal to ro.build.version.sdk(%d)\n"
+                       % (board_api_level, sdk_version))
       return False
 
   return True
diff --git a/tools/protos/Android.bp b/tools/protos/Android.bp
new file mode 100644
index 0000000..c6ad19e
--- /dev/null
+++ b/tools/protos/Android.bp
@@ -0,0 +1,32 @@
+// Copyright 2023 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_library_host {
+    name: "metadata_file_proto_py",
+    version: {
+        py3: {
+            enabled: true,
+        },
+    },
+    srcs: [
+        "metadata_file.proto",
+    ],
+    proto: {
+        canonical_path_from_root: false,
+    },
+}
diff --git a/tools/protos/metadata_file.proto b/tools/protos/metadata_file.proto
new file mode 100644
index 0000000..ac1129a
--- /dev/null
+++ b/tools/protos/metadata_file.proto
@@ -0,0 +1,281 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package metadata_file;
+
+// Proto definition of METADATA files of packages in AOSP codebase.
+message Metadata {
+  // Name of the package.
+  optional string name = 1;
+
+  // A short description (a few lines) of the package.
+  // Example: "Handles location lookups, throttling, batching, etc."
+  optional string description = 2;
+
+  // Specifies additional data about third-party packages.
+  optional ThirdParty third_party = 3;
+}
+
+message ThirdParty {
+  // URL(s) associated with the package.
+  //
+  // At a minimum, all packages must specify a URL which identifies where it
+  // came from, containing a type of: ARCHIVE, GIT or OTHER. Typically,
+  // a package should contain only a single URL from these types.  Occasionally,
+  // a package may be broken across multiple archive files for whatever reason,
+  // in which case having multiple ARCHIVE URLs is okay.  However, this should
+  // not be used to combine different logical packages that are versioned and
+  // possibly licensed differently.
+  repeated URL url = 1;
+
+  // The package version.  In order of preference, this should contain:
+  //  - If the package comes from Git or another source control system,
+  //    a specific tag or revision in source control, such as "r123" or
+  //    "58e27d2".  This MUST NOT be a mutable ref such as a branch name.
+  //  - a released package version such as "1.0", "2.3-beta", etc.
+  //  - the date the package was retrieved, formatted as "As of YYYY-MM-DD".
+  optional string version = 2;
+
+  // The date of the change in which the package was last upgraded from
+  // upstream.
+  // This should only identify package upgrades from upstream, not local
+  // modifications. This may identify the date of either the original or
+  // merged change.
+  //
+  // Note: this is NOT the date that this version of the package was released
+  // externally.
+  optional Date last_upgrade_date = 3;
+
+  // License type that identifies how the package may be used.
+  optional LicenseType license_type = 4;
+
+  // An additional note explaining the licensing of this package.  This is most
+  // commonly used with a commercial license.
+  optional string license_note = 5;
+
+  // Description of local changes that have been made to the package.  This does
+  // not need to (and in most cases should not) attempt to include an exhaustive
+  // list of all changes, but may instead direct readers to review the local
+  // commit history, a collection of patch files, a separate README.md (or
+  // similar) document, etc.
+  // Note: Use of this field to store IDs of advisories fixed with a backported
+  // patch is deprecated, use "security.mitigated_security_patch" instead.
+  optional string local_modifications = 6;
+
+  // Security related metadata including risk category and any special
+  // instructions for using the package, as determined by an ISE-TPS review.
+  optional Security security = 7;
+
+  // The type of directory this metadata represents.
+  optional DirectoryType type = 8 [default = PACKAGE];
+
+  // The homepage for the package. This will eventually replace
+  // `url { type: HOMEPAGE }`
+  optional string homepage = 9;
+
+  // SBOM information of the package. It is mandatory for prebuilt packages.
+  oneof sbom {
+    // Reference to external SBOM document provided as URL.
+    SBOMRef sbom_ref = 10;
+  }
+
+}
+
+// URL associated with a third-party package.
+message URL {
+  enum Type {
+    // The homepage for the package. For example, "https://bazel.io/". This URL
+    // is optional, but encouraged to help disambiguate similarly named packages
+    // or to get more information about the package. This is especially helpful
+    // when no other URLs provide human readable resources (such as git:// or
+    // sso:// URLs).
+    HOMEPAGE = 1;
+
+    // The URL of the archive containing the source code for the package, for
+    // example a zip or tgz file.
+    ARCHIVE = 2;
+
+    // The URL of the upstream git repository this package is retrieved from.
+    // For example:
+    //  - https://github.com/git/git.git
+    //  - git://git.kernel.org/pub/scm/git/git.git
+    //
+    // Use of a git URL requires that the package "version" value must specify a
+    // specific git tag or revision.
+    GIT = 3;
+
+    // The URL of the upstream SVN repository this package is retrieved from.
+    // For example:
+    //  - http://llvm.org/svn/llvm-project/llvm/
+    //
+    // Use of an SVN URL requires that the package "version" value must specify
+    // a specific SVN tag or revision.
+    SVN = 4;
+
+    // The URL of the upstream mercurial repository this package is retrieved
+    // from. For example:
+    //   - https://mercurial-scm.org/repo/evolve
+    //
+    // Use of a mercurial URL requires that the package "version" value must
+    // specify a specific tag or revision.
+    HG = 5;
+
+    // The URL of the upstream darcs repository this package is retrieved
+    // from. For example:
+    //   - https://hub.darcs.net/hu.dwim/hu.dwim.util
+    //
+    // Use of a DARCS URL requires that the package "version" value must
+    // specify a specific tag or revision.
+    DARCS = 6;
+
+    PIPER = 7;
+
+    // A URL that does not fit any other type. This may also indicate that the
+    // source code was received via email or some other out-of-band way. This is
+    // most commonly used with commercial software received directly from the
+    // vendor. In the case of email, the URL value can be used to provide
+    // additional information about how it was received.
+    OTHER = 8;
+
+    // The URL identifying where the local copy of the package source code can
+    // be found.
+    //
+    // Typically, the metadata files describing a package reside in the same
+    // directory as the source code for the package. In a few rare cases where
+    // they are separate, the LOCAL_SOURCE URL identifies where to find the
+    // source code. This only describes where to find the local copy of the
+    // source; there should always be an additional URL describing where the
+    // package was retrieved from.
+    //
+    // Examples:
+    //  - https://android.googlesource.com/platform/external/apache-http/
+    LOCAL_SOURCE = 9;
+  }
+
+  // The type of resource this URL identifies.
+  optional Type type = 1;
+
+  // The actual URL value.  URLs should be absolute and start with 'http://' or
+  // 'https://' (or occasionally 'git://' or 'ftp://' where appropriate).
+  optional string value = 2;
+}
+
+// License type that identifies how the packages may be used.
+enum LicenseType {
+  BY_EXCEPTION_ONLY = 1;
+  NOTICE = 2;
+  PERMISSIVE = 3;
+  RECIPROCAL = 4;
+  RESTRICTED_IF_STATICALLY_LINKED = 5;
+  RESTRICTED = 6;
+  UNENCUMBERED = 7;
+}
+
+// Identifies security related metadata including risk category and any special
+// instructions for using the package.
+message Security {
+  // Security risk category for a package, as determined by an ISE-TPS review.
+  enum Category {
+    CATEGORY_UNSPECIFIED = 0;
+
+    // Package should only be used in a sandboxed environment.
+    // Package should have restricted visibility.
+    SANDBOXED_ONLY = 1;
+
+    // Package should not be used to process user content. It is considered
+    // safe to use to process trusted data only. Package should have restricted
+    // visibility.
+    TRUSTED_DATA_ONLY = 2;
+
+    // Package is considered safe to use.
+    REVIEWED_AND_SECURE = 3;
+  }
+
+  // Identifies the security risk category for the package.  This will be
+  // provided by the ISE-TPS team as the result of a security review of the
+  // package.
+  optional Category category = 1;
+
+  // An additional security note for the package.
+  optional string note = 2;
+
+  // Text tag to categorize the package. It's currently used by security:
+  // - to disable OSV (https://osv.dev)
+  // support via the `OSV:disable` tag
+  // - to attach CPE to their corresponding packages, for vulnerability
+  // monitoring:
+  //
+  // Please document your use case here should you want to add one.
+  repeated string tag = 3;
+
+  // ID of advisories fixed with a mitigated patch, for example CVE-2018-1111.
+  repeated string mitigated_security_patch = 4;
+}
+
+enum DirectoryType {
+  UNDEFINED = 0;
+
+  // This directory represents a package.
+  PACKAGE = 1;
+
+  // This directory is designed to organize multiple third-party PACKAGE
+  // directories.
+  GROUP = 2;
+
+  // This directory contains several PACKAGE directories representing
+  // different versions of the same third-party project.
+  VERSIONS = 3;
+}
+
+// Represents a whole or partial calendar date, such as a birthday. The time of
+// day and time zone are either specified elsewhere or are insignificant. The
+// date is relative to the Gregorian Calendar. This can represent one of the
+// following:
+//
+// * A full date, with non-zero year, month, and day values.
+// * A month and day, with a zero year (for example, an anniversary).
+// * A year on its own, with a zero month and a zero day.
+// * A year and month, with a zero day (for example, a credit card expiration
+//   date).
+message Date {
+  // Year of the date. Must be from 1 to 9999, or 0 to specify a date without
+  // a year.
+  optional int32 year = 1;
+  // Month of a year. Must be from 1 to 12, or 0 to specify a year without a
+  // month and day.
+  optional int32 month = 2;
+  // Day of a month. Must be from 1 to 31 and valid for the year and month, or 0
+  // to specify a year by itself or a year and month where the day isn't
+  // significant.
+  optional int32 day = 3;
+}
+
+// Reference to external SBOM document and element corresponding to the package.
+// See https://spdx.github.io/spdx-spec/v2.3/document-creation-information/#66-external-document-references-field
+message SBOMRef {
+  // The URL that points to the SBOM document of the upstream package of this
+  // third_party package.
+  optional string url = 1;
+  // Checksum of the SBOM document the url field points to.
+  // Format: e.g. SHA1:<checksum>, or any algorithm defined in
+  // https://spdx.github.io/spdx-spec/v2.3/file-information/#8.4
+  optional string checksum = 2;
+  // SPDXID of the upstream package/file defined in the SBOM document the url field points to.
+  // Format: SPDXRef-[a-zA-Z0-9.-]+, see
+  // https://spdx.github.io/spdx-spec/v2.3/package-information/#72-package-spdx-identifier-field or
+  // https://spdx.github.io/spdx-spec/v2.3/file-information/#82-file-spdx-identifier-field
+  optional string element_id = 3;
+}
\ No newline at end of file
diff --git a/tools/rbcrun/Android.bp b/tools/rbcrun/Android.bp
index 90173ac..4fab858 100644
--- a/tools/rbcrun/Android.bp
+++ b/tools/rbcrun/Android.bp
@@ -19,7 +19,7 @@
 
 blueprint_go_binary {
     name: "rbcrun",
-    srcs: ["cmd/rbcrun.go"],
+    srcs: ["rbcrun/rbcrun.go"],
     deps: ["rbcrun-module"],
 }
 
@@ -34,6 +34,7 @@
     pkgPath: "rbcrun",
     deps: [
         "go-starlark-starlark",
+        "go-starlark-starlarkjson",
         "go-starlark-starlarkstruct",
         "go-starlark-starlarktest",
     ],
diff --git a/tools/rbcrun/cmd/rbcrun.go b/tools/rbcrun/cmd/rbcrun.go
deleted file mode 100644
index 4db6a0b..0000000
--- a/tools/rbcrun/cmd/rbcrun.go
+++ /dev/null
@@ -1,98 +0,0 @@
-// Copyright 2021 Google LLC
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-//      http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-package main
-
-import (
-	"flag"
-	"fmt"
-	"go.starlark.net/starlark"
-	"os"
-	"rbcrun"
-	"strings"
-)
-
-var (
-	execprog = flag.String("c", "", "execute program `prog`")
-	rootdir  = flag.String("d", ".", "the value of // for load paths")
-	file     = flag.String("f", "", "file to execute")
-	perfFile = flag.String("perf", "", "save performance data")
-)
-
-func main() {
-	flag.Parse()
-	filename := *file
-	var src interface{}
-	var env []string
-
-	rc := 0
-	for _, arg := range flag.Args() {
-		if strings.Contains(arg, "=") {
-			env = append(env, arg)
-		} else if filename == "" {
-			filename = arg
-		} else {
-			quit("only one file can be executed\n")
-		}
-	}
-	if *execprog != "" {
-		if filename != "" {
-			quit("either -c or file name should be present\n")
-		}
-		filename = "<cmdline>"
-		src = *execprog
-	}
-	if filename == "" {
-		if len(env) > 0 {
-			fmt.Fprintln(os.Stderr,
-				"no file to run -- if your file's name contains '=', use -f to specify it")
-		}
-		flag.Usage()
-		os.Exit(1)
-	}
-	if stat, err := os.Stat(*rootdir); os.IsNotExist(err) || !stat.IsDir() {
-		quit("%s is not a directory\n", *rootdir)
-	}
-	if *perfFile != "" {
-		pprof, err := os.Create(*perfFile)
-		if err != nil {
-			quit("%s: err", *perfFile)
-		}
-		defer pprof.Close()
-		if err := starlark.StartProfile(pprof); err != nil {
-			quit("%s\n", err)
-		}
-	}
-	rbcrun.LoadPathRoot = *rootdir
-	err := rbcrun.Run(filename, src, env)
-	if *perfFile != "" {
-		if err2 := starlark.StopProfile(); err2 != nil {
-			fmt.Fprintln(os.Stderr, err2)
-			rc = 1
-		}
-	}
-	if err != nil {
-		if evalErr, ok := err.(*starlark.EvalError); ok {
-			quit("%s\n", evalErr.Backtrace())
-		} else {
-			quit("%s\n", err)
-		}
-	}
-	os.Exit(rc)
-}
-
-func quit(format string, s ...interface{}) {
-	fmt.Fprintf(os.Stderr, format, s...)
-	os.Exit(2)
-}
diff --git a/tools/rbcrun/go.mod b/tools/rbcrun/go.mod
index a029eb4..5ae2972 100644
--- a/tools/rbcrun/go.mod
+++ b/tools/rbcrun/go.mod
@@ -1,9 +1,6 @@
 module rbcrun
 
-require (
-	github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d // indirect
-	go.starlark.net v0.0.0-20201006213952-227f4aabceb5
-)
+require go.starlark.net v0.0.0-20201006213952-227f4aabceb5
 
 replace go.starlark.net => ../../../../external/starlark-go
 
diff --git a/tools/rbcrun/go.sum b/tools/rbcrun/go.sum
index db4d51e..10761a8 100644
--- a/tools/rbcrun/go.sum
+++ b/tools/rbcrun/go.sum
@@ -1,11 +1,8 @@
 cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=
 github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
-github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8=
 github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
-github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1 h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8=
 github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -26,8 +23,6 @@
 github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E=
-github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d/go.mod h1:o96djdrsSGy3AWPyBgZMAGfxZNfgntdJG+11KU4QvbU=
 github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@@ -44,9 +39,6 @@
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae h1:Ih9Yo4hSPImZOpfGuA4bR/ORKTAbhZo2AbWNRCnevdo=
-golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA=
 golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
diff --git a/tools/rbcrun/host.go b/tools/rbcrun/host.go
index 32afa45..1d68d43 100644
--- a/tools/rbcrun/host.go
+++ b/tools/rbcrun/host.go
@@ -24,13 +24,20 @@
 	"strings"
 
 	"go.starlark.net/starlark"
+	"go.starlark.net/starlarkjson"
 	"go.starlark.net/starlarkstruct"
 )
 
-const callerDirKey = "callerDir"
+type ExecutionMode int
+const (
+	ExecutionModeRbc ExecutionMode = iota
+	ExecutionModeMake ExecutionMode = iota
+)
 
-var LoadPathRoot = "."
-var shellPath string
+const allowExternalEntrypointKey = "allowExternalEntrypoint"
+const callerDirKey = "callerDir"
+const executionModeKey = "executionMode"
+const shellKey = "shell"
 
 type modentry struct {
 	globals starlark.StringDict
@@ -39,20 +46,68 @@
 
 var moduleCache = make(map[string]*modentry)
 
-var builtins starlark.StringDict
+var rbcBuiltins starlark.StringDict = starlark.StringDict{
+	"struct":   starlark.NewBuiltin("struct", starlarkstruct.Make),
+	// To convert find-copy-subdir and product-copy-files-by pattern
+	"rblf_find_files": starlark.NewBuiltin("rblf_find_files", find),
+	// To convert makefile's $(shell cmd)
+	"rblf_shell": starlark.NewBuiltin("rblf_shell", shell),
+	// Output to stderr
+	"rblf_log": starlark.NewBuiltin("rblf_log", log),
+	// To convert makefile's $(wildcard foo*)
+	"rblf_wildcard": starlark.NewBuiltin("rblf_wildcard", wildcard),
+}
 
-func moduleName2AbsPath(moduleName string, callerDir string) (string, error) {
-	path := moduleName
-	if ix := strings.LastIndex(path, ":"); ix >= 0 {
-		path = path[0:ix] + string(os.PathSeparator) + path[ix+1:]
+var makeBuiltins starlark.StringDict = starlark.StringDict{
+	"struct":   starlark.NewBuiltin("struct", starlarkstruct.Make),
+	"json": starlarkjson.Module,
+}
+
+// Takes a module name (the first argument to the load() function) and returns the path
+// it's trying to load, stripping out leading //, and handling leading :s.
+func cleanModuleName(moduleName string, callerDir string, allowExternalPaths bool) (string, error) {
+	if strings.Count(moduleName, ":") > 1 {
+		return "", fmt.Errorf("at most 1 colon must be present in starlark path: %s", moduleName)
 	}
-	if strings.HasPrefix(path, "//") {
-		return filepath.Abs(filepath.Join(LoadPathRoot, path[2:]))
+
+	// We don't have full support for external repositories, but at least support skylib's dicts.
+	if moduleName == "@bazel_skylib//lib:dicts.bzl" {
+		return "external/bazel-skylib/lib/dicts.bzl", nil
+	}
+
+	localLoad := false
+	if strings.HasPrefix(moduleName, "@//") {
+		moduleName = moduleName[3:]
+	} else if strings.HasPrefix(moduleName, "//") {
+		moduleName = moduleName[2:]
 	} else if strings.HasPrefix(moduleName, ":") {
-		return filepath.Abs(filepath.Join(callerDir, path[1:]))
-	} else {
-		return filepath.Abs(path)
+		moduleName = moduleName[1:]
+		localLoad = true
+	} else if !allowExternalPaths {
+		return "", fmt.Errorf("load path must start with // or :")
 	}
+
+	if ix := strings.LastIndex(moduleName, ":"); ix >= 0 {
+		moduleName = moduleName[:ix] + string(os.PathSeparator) + moduleName[ix+1:]
+	}
+
+	if filepath.Clean(moduleName) != moduleName {
+		return "", fmt.Errorf("load path must be clean, found: %s, expected: %s", moduleName, filepath.Clean(moduleName))
+	}
+	if !allowExternalPaths {
+		if strings.HasPrefix(moduleName, "../") {
+			return "", fmt.Errorf("load path must not start with ../: %s", moduleName)
+		}
+		if strings.HasPrefix(moduleName, "/") {
+			return "", fmt.Errorf("load path starts with /, use // for a absolute path: %s", moduleName)
+		}
+	}
+
+	if localLoad {
+		return filepath.Join(callerDir, moduleName), nil
+	}
+
+	return moduleName, nil
 }
 
 // loader implements load statement. The format of the loaded module URI is
@@ -61,14 +116,19 @@
 // The presence of `|symbol` indicates that the loader should return a single 'symbol'
 // bound to None if file is missing.
 func loader(thread *starlark.Thread, module string) (starlark.StringDict, error) {
-	pipePos := strings.LastIndex(module, "|")
-	mustLoad := pipePos < 0
+	mode := thread.Local(executionModeKey).(ExecutionMode)
+	allowExternalEntrypoint := thread.Local(allowExternalEntrypointKey).(bool)
 	var defaultSymbol string
-	if !mustLoad {
-		defaultSymbol = module[pipePos+1:]
-		module = module[:pipePos]
+	mustLoad := true
+	if mode == ExecutionModeRbc {
+		pipePos := strings.LastIndex(module, "|")
+		if pipePos >= 0 {
+			mustLoad = false
+			defaultSymbol = module[pipePos+1:]
+			module = module[:pipePos]
+		}
 	}
-	modulePath, err := moduleName2AbsPath(module, thread.Local(callerDirKey).(string))
+	modulePath, err := cleanModuleName(module, thread.Local(callerDirKey).(string), allowExternalEntrypoint)
 	if err != nil {
 		return nil, err
 	}
@@ -99,9 +159,20 @@
 				childThread.SetLocal(testReporterKey, v)
 			}
 
+			// Only the entrypoint starlark file allows external loads.
+			childThread.SetLocal(allowExternalEntrypointKey, false)
 			childThread.SetLocal(callerDirKey, filepath.Dir(modulePath))
-			globals, err := starlark.ExecFile(childThread, modulePath, nil, builtins)
-			e = &modentry{globals, err}
+			childThread.SetLocal(executionModeKey, mode)
+			childThread.SetLocal(shellKey, thread.Local(shellKey))
+			if mode == ExecutionModeRbc {
+				globals, err := starlark.ExecFile(childThread, modulePath, nil, rbcBuiltins)
+				e = &modentry{globals, err}
+			} else if mode == ExecutionModeMake {
+				globals, err := starlark.ExecFile(childThread, modulePath, nil, makeBuiltins)
+				e = &modentry{globals, err}
+			} else {
+				return nil, fmt.Errorf("unknown executionMode %d", mode)
+			}
 		} else {
 			e = &modentry{starlark.StringDict{defaultSymbol: starlark.None}, nil}
 		}
@@ -189,12 +260,13 @@
 // its output the same way as Make's $(shell ) function. The end-of-lines
 // ("\n" or "\r\n") are replaced with " " in the result, and the trailing
 // end-of-line is removed.
-func shell(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
+func shell(thread *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
 	kwargs []starlark.Tuple) (starlark.Value, error) {
 	var command string
 	if err := starlark.UnpackPositionalArgs(b.Name(), args, kwargs, 1, &command); err != nil {
 		return starlark.None, err
 	}
+	shellPath := thread.Local(shellKey).(string)
 	if shellPath == "" {
 		return starlark.None,
 			fmt.Errorf("cannot run shell, /bin/sh is missing (running on Windows?)")
@@ -223,16 +295,6 @@
 	return starlark.NewList(elems)
 }
 
-// propsetFromEnv constructs a propset from the array of KEY=value strings
-func structFromEnv(env []string) *starlarkstruct.Struct {
-	sd := make(map[string]starlark.Value, len(env))
-	for _, x := range env {
-		kv := strings.SplitN(x, "=", 2)
-		sd[kv[0]] = starlark.String(kv[1])
-	}
-	return starlarkstruct.FromStringDict(starlarkstruct.Default, sd)
-}
-
 func log(thread *starlark.Thread, fn *starlark.Builtin, args starlark.Tuple, kwargs []starlark.Tuple) (starlark.Value, error) {
 	sep := " "
 	if err := starlark.UnpackArgs("print", nil, kwargs, "sep?", &sep); err != nil {
@@ -255,50 +317,69 @@
 	return starlark.None, nil
 }
 
-func setup(env []string) {
-	// Create the symbols that aid makefile conversion. See README.md
-	builtins = starlark.StringDict{
-		"struct":   starlark.NewBuiltin("struct", starlarkstruct.Make),
-		"rblf_cli": structFromEnv(env),
-		"rblf_env": structFromEnv(os.Environ()),
-		// To convert find-copy-subdir and product-copy-files-by pattern
-		"rblf_find_files": starlark.NewBuiltin("rblf_find_files", find),
-		// To convert makefile's $(shell cmd)
-		"rblf_shell": starlark.NewBuiltin("rblf_shell", shell),
-		// Output to stderr
-		"rblf_log": starlark.NewBuiltin("rblf_log", log),
-		// To convert makefile's $(wildcard foo*)
-		"rblf_wildcard": starlark.NewBuiltin("rblf_wildcard", wildcard),
-	}
-
-	// NOTE(asmundak): OS-specific. Behave similar to Linux `system` call,
-	// which always uses /bin/sh to run the command
-	shellPath = "/bin/sh"
-	if _, err := os.Stat(shellPath); err != nil {
-		shellPath = ""
-	}
-}
-
 // Parses, resolves, and executes a Starlark file.
 // filename and src parameters are as for starlark.ExecFile:
 // * filename is the name of the file to execute,
 //   and the name that appears in error messages;
 // * src is an optional source of bytes to use instead of filename
 //   (it can be a string, or a byte array, or an io.Reader instance)
-// * commandVars is an array of "VAR=value" items. They are accessible from
-//   the starlark script as members of the `rblf_cli` propset.
-func Run(filename string, src interface{}, commandVars []string) error {
-	setup(commandVars)
+// Returns the top-level starlark variables, the list of starlark files loaded, and an error
+func Run(filename string, src interface{}, mode ExecutionMode, allowExternalEntrypoint bool) (starlark.StringDict, []string, error) {
+	// NOTE(asmundak): OS-specific. Behave similar to Linux `system` call,
+	// which always uses /bin/sh to run the command
+	shellPath := "/bin/sh"
+	if _, err := os.Stat(shellPath); err != nil {
+		shellPath = ""
+	}
 
 	mainThread := &starlark.Thread{
 		Name:  "main",
-		Print: func(_ *starlark.Thread, msg string) { fmt.Println(msg) },
+		Print: func(_ *starlark.Thread, msg string) {
+			if mode == ExecutionModeRbc {
+			// In rbc mode, print() goes to stdout; rblf_log is used to print to stderr
+				fmt.Println(msg)
+			} else if mode == ExecutionModeMake {
+				fmt.Fprintln(os.Stderr, msg)
+			}
+		},
 		Load:  loader,
 	}
-	absPath, err := filepath.Abs(filename)
-	if err == nil {
-		mainThread.SetLocal(callerDirKey, filepath.Dir(absPath))
-		_, err = starlark.ExecFile(mainThread, absPath, src, builtins)
+	filename, err := filepath.Abs(filename)
+	if err != nil {
+		return nil, nil, err
 	}
-	return err
+	if wd, err := os.Getwd(); err == nil {
+		filename, err = filepath.Rel(wd, filename)
+		if err != nil {
+			return nil, nil, err
+		}
+		if !allowExternalEntrypoint && strings.HasPrefix(filename, "../") {
+			return nil, nil, fmt.Errorf("path could not be made relative to workspace root: %s", filename)
+		}
+	} else {
+		return nil, nil, err
+	}
+
+	// Add top-level file to cache for cycle detection purposes
+	moduleCache[filename] = nil
+
+	var results starlark.StringDict
+	mainThread.SetLocal(allowExternalEntrypointKey, allowExternalEntrypoint)
+	mainThread.SetLocal(callerDirKey, filepath.Dir(filename))
+	mainThread.SetLocal(executionModeKey, mode)
+	mainThread.SetLocal(shellKey, shellPath)
+	if mode == ExecutionModeRbc {
+		results, err = starlark.ExecFile(mainThread, filename, src, rbcBuiltins)
+	} else if mode == ExecutionModeMake {
+		results, err = starlark.ExecFile(mainThread, filename, src, makeBuiltins)
+	} else {
+		return results, nil, fmt.Errorf("unknown executionMode %d", mode)
+	}
+	loadedStarlarkFiles := make([]string, 0, len(moduleCache))
+	for file := range moduleCache {
+		loadedStarlarkFiles = append(loadedStarlarkFiles, file)
+	}
+	sort.Strings(loadedStarlarkFiles)
+
+	return results, loadedStarlarkFiles, err
 }
diff --git a/tools/rbcrun/host_test.go b/tools/rbcrun/host_test.go
index 97f6ce9..10ce55e 100644
--- a/tools/rbcrun/host_test.go
+++ b/tools/rbcrun/host_test.go
@@ -53,8 +53,7 @@
 }
 
 // Common setup for the tests: create thread, change to the test directory
-func testSetup(t *testing.T, env []string) *starlark.Thread {
-	setup(env)
+func testSetup(t *testing.T) *starlark.Thread {
 	thread := &starlark.Thread{
 		Load: func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
 			if module == "assert.star" {
@@ -72,14 +71,15 @@
 func dataDir() string {
 	_, thisSrcFile, _, _ := runtime.Caller(0)
 	return filepath.Join(filepath.Dir(thisSrcFile), "testdata")
-
 }
 
 func exerciseStarlarkTestFile(t *testing.T, starFile string) {
 	// In order to use "assert.star" from go/starlark.net/starlarktest in the tests, provide:
 	//  * load function that handles "assert.star"
 	//  * starlarktest.DataFile function that finds its location
-	setup(nil)
+	if err := os.Chdir(dataDir()); err != nil {
+		t.Fatal(err)
+	}
 	thread := &starlark.Thread{
 		Load: func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
 			if module == "assert.star" {
@@ -90,21 +90,9 @@
 	starlarktest.SetReporter(thread, t)
 	_, thisSrcFile, _, _ := runtime.Caller(0)
 	filename := filepath.Join(filepath.Dir(thisSrcFile), starFile)
-	if _, err := starlark.ExecFile(thread, filename, nil, builtins); err != nil {
-		if err, ok := err.(*starlark.EvalError); ok {
-			t.Fatal(err.Backtrace())
-		}
-		t.Fatal(err)
-	}
-}
-
-func TestCliAndEnv(t *testing.T) {
-	// TODO(asmundak): convert this to use exerciseStarlarkTestFile
-	if err := os.Setenv("TEST_ENVIRONMENT_FOO", "test_environment_foo"); err != nil {
-		t.Fatal(err)
-	}
-	thread := testSetup(t, []string{"CLI_FOO=foo"})
-	if _, err := starlark.ExecFile(thread, "cli_and_env.star", nil, builtins); err != nil {
+	thread.SetLocal(executionModeKey, ExecutionModeRbc)
+	thread.SetLocal(shellKey, "/bin/sh")
+	if _, err := starlark.ExecFile(thread, filename, nil, rbcBuiltins); err != nil {
 		if err, ok := err.(*starlark.EvalError); ok {
 			t.Fatal(err.Backtrace())
 		}
@@ -114,11 +102,8 @@
 
 func TestFileOps(t *testing.T) {
 	// TODO(asmundak): convert this to use exerciseStarlarkTestFile
-	if err := os.Setenv("TEST_DATA_DIR", dataDir()); err != nil {
-		t.Fatal(err)
-	}
-	thread := testSetup(t, nil)
-	if _, err := starlark.ExecFile(thread, "file_ops.star", nil, builtins); err != nil {
+	thread := testSetup(t)
+	if _, err := starlark.ExecFile(thread, "file_ops.star", nil, rbcBuiltins); err != nil {
 		if err, ok := err.(*starlark.EvalError); ok {
 			t.Fatal(err.Backtrace())
 		}
@@ -128,7 +113,7 @@
 
 func TestLoad(t *testing.T) {
 	// TODO(asmundak): convert this to use exerciseStarlarkTestFile
-	thread := testSetup(t, nil)
+	thread := testSetup(t)
 	thread.Load = func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
 		if module == "assert.star" {
 			return starlarktest.LoadAssertModule()
@@ -137,9 +122,13 @@
 		}
 	}
 	dir := dataDir()
+	if err := os.Chdir(filepath.Dir(dir)); err != nil {
+		t.Fatal(err)
+	}
+	thread.SetLocal(allowExternalEntrypointKey, false)
 	thread.SetLocal(callerDirKey, dir)
-	LoadPathRoot = filepath.Dir(dir)
-	if _, err := starlark.ExecFile(thread, "load.star", nil, builtins); err != nil {
+	thread.SetLocal(executionModeKey, ExecutionModeRbc)
+	if _, err := starlark.ExecFile(thread, "testdata/load.star", nil, rbcBuiltins); err != nil {
 		if err, ok := err.(*starlark.EvalError); ok {
 			t.Fatal(err.Backtrace())
 		}
@@ -148,8 +137,5 @@
 }
 
 func TestShell(t *testing.T) {
-	if err := os.Setenv("TEST_DATA_DIR", dataDir()); err != nil {
-		t.Fatal(err)
-	}
 	exerciseStarlarkTestFile(t, "testdata/shell.star")
 }
diff --git a/tools/rbcrun/rbcrun/rbcrun.go b/tools/rbcrun/rbcrun/rbcrun.go
new file mode 100644
index 0000000..a15b867
--- /dev/null
+++ b/tools/rbcrun/rbcrun/rbcrun.go
@@ -0,0 +1,190 @@
+// Copyright 2021 Google LLC
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package main
+
+import (
+	"flag"
+	"fmt"
+	"os"
+	"rbcrun"
+	"regexp"
+	"strings"
+
+	"go.starlark.net/starlark"
+)
+
+var (
+	allowExternalEntrypoint = flag.Bool("allow_external_entrypoint", false, "allow the entrypoint starlark file to be outside of the source tree")
+	modeFlag  = flag.String("mode", "", "the general behavior of rbcrun. Can be \"rbc\" or \"make\". Required.")
+	rootdir  = flag.String("d", ".", "the value of // for load paths")
+	perfFile = flag.String("perf", "", "save performance data")
+	identifierRe = regexp.MustCompile("[a-zA-Z_][a-zA-Z0-9_]*")
+)
+
+func getEntrypointStarlarkFile() string {
+	filename := ""
+
+	for _, arg := range flag.Args() {
+		if filename == "" {
+			filename = arg
+		} else {
+			quit("only one file can be executed\n")
+		}
+	}
+	if filename == "" {
+		flag.Usage()
+		os.Exit(1)
+	}
+	return filename
+}
+
+func getMode() rbcrun.ExecutionMode {
+	switch *modeFlag {
+	case "rbc":
+		return rbcrun.ExecutionModeRbc
+	case "make":
+		return rbcrun.ExecutionModeMake
+	case "":
+		quit("-mode flag is required.")
+	default:
+		quit("Unknown -mode value %q, expected 1 of \"rbc\", \"make\"", *modeFlag)
+	}
+	return rbcrun.ExecutionModeMake
+}
+
+var makeStringReplacer = strings.NewReplacer("#", "\\#", "$", "$$")
+
+func cleanStringForMake(s string) (string, error) {
+	if strings.ContainsAny(s, "\\\n") {
+		// \\ in make is literally \\, not a single \, so we can't allow them.
+		// \<newline> in make will produce a space, not a newline.
+		return "", fmt.Errorf("starlark strings exported to make cannot contain backslashes or newlines")
+	}
+	return makeStringReplacer.Replace(s), nil
+}
+
+func getValueInMakeFormat(value starlark.Value, allowLists bool) (string, error) {
+	switch v := value.(type) {
+	case starlark.String:
+		if cleanedValue, err := cleanStringForMake(v.GoString()); err == nil {
+			return cleanedValue, nil
+		} else {
+			return "", err
+		}
+	case starlark.Int:
+		return v.String(), nil
+	case *starlark.List:
+		if !allowLists {
+			return "", fmt.Errorf("nested lists are not allowed to be exported from starlark to make, flatten the list in starlark first")
+		}
+		result := ""
+		for i := 0; i < v.Len(); i++ {
+			value, err := getValueInMakeFormat(v.Index(i), false)
+			if err != nil {
+				return "", err
+			}
+			if i > 0 {
+				result += " "
+			}
+			result += value
+		}
+		return result, nil
+	default:
+		return "", fmt.Errorf("only starlark strings, ints, and lists of strings/ints can be exported to make. Please convert all other types in starlark first. Found type: %s", value.Type())
+	}
+}
+
+func printVarsInMakeFormat(globals starlark.StringDict) error {
+	// We could just directly export top level variables by name instead of going through
+	// a variables_to_export_to_make dictionary, but that wouldn't allow for exporting a
+	// runtime-defined number of variables to make. This can be important because dictionaries
+	// in make are often represented by a unique variable for every key in the dictionary.
+	variablesValue, ok := globals["variables_to_export_to_make"]
+	if !ok {
+		return fmt.Errorf("expected top-level starlark file to have a \"variables_to_export_to_make\" variable")
+	}
+	variables, ok := variablesValue.(*starlark.Dict)
+	if !ok {
+		return fmt.Errorf("expected variables_to_export_to_make to be a dict, got %s", variablesValue.Type())
+	}
+
+	for _, varTuple := range variables.Items() {
+		varNameStarlark, ok := varTuple.Index(0).(starlark.String)
+		if !ok {
+			return fmt.Errorf("all keys in variables_to_export_to_make must be strings, but got %q", varTuple.Index(0).Type())
+		}
+		varName := varNameStarlark.GoString()
+		if !identifierRe.MatchString(varName) {
+			return fmt.Errorf("all variables at the top level starlark file must be valid c identifiers, but got %q", varName)
+		}
+		if varName == "LOADED_STARLARK_FILES" {
+			return fmt.Errorf("the name LOADED_STARLARK_FILES is reserved for use by the starlark interpreter")
+		}
+		valueMake, err := getValueInMakeFormat(varTuple.Index(1), true)
+		if err != nil {
+			return err
+		}
+		// The :=$= is special Kati syntax that means "set and make readonly"
+		fmt.Printf("%s :=$= %s\n", varName, valueMake)
+	}
+	return nil
+}
+
+func main() {
+	flag.Parse()
+	filename := getEntrypointStarlarkFile()
+	mode := getMode()
+
+	if os.Chdir(*rootdir) != nil {
+		quit("could not chdir to %s\n", *rootdir)
+	}
+	if *perfFile != "" {
+		pprof, err := os.Create(*perfFile)
+		if err != nil {
+			quit("%s: err", *perfFile)
+		}
+		defer pprof.Close()
+		if err := starlark.StartProfile(pprof); err != nil {
+			quit("%s\n", err)
+		}
+	}
+	variables, loadedStarlarkFiles, err := rbcrun.Run(filename, nil, mode, *allowExternalEntrypoint)
+	rc := 0
+	if *perfFile != "" {
+		if err2 := starlark.StopProfile(); err2 != nil {
+			fmt.Fprintln(os.Stderr, err2)
+			rc = 1
+		}
+	}
+	if err != nil {
+		if evalErr, ok := err.(*starlark.EvalError); ok {
+			quit("%s\n", evalErr.Backtrace())
+		} else {
+			quit("%s\n", err)
+		}
+	}
+	if mode == rbcrun.ExecutionModeMake {
+		if err := printVarsInMakeFormat(variables); err != nil {
+			quit("%s\n", err)
+		}
+		fmt.Printf("LOADED_STARLARK_FILES := %s\n", strings.Join(loadedStarlarkFiles, " "))
+	}
+	os.Exit(rc)
+}
+
+func quit(format string, s ...interface{}) {
+	fmt.Fprintf(os.Stderr, format, s...)
+	os.Exit(2)
+}
diff --git a/tools/rbcrun/testdata/cli_and_env.star b/tools/rbcrun/testdata/cli_and_env.star
deleted file mode 100644
index d6f464a..0000000
--- a/tools/rbcrun/testdata/cli_and_env.star
+++ /dev/null
@@ -1,11 +0,0 @@
-# Tests rblf_env access
-load("assert.star", "assert")
-
-
-def test():
-    assert.eq(rblf_env.TEST_ENVIRONMENT_FOO, "test_environment_foo")
-    assert.fails(lambda: rblf_env.FOO_BAR_BAZ, ".*struct has no .FOO_BAR_BAZ attribute$")
-    assert.eq(rblf_cli.CLI_FOO, "foo")
-
-
-test()
diff --git a/tools/rbcrun/testdata/file_ops.star b/tools/rbcrun/testdata/file_ops.star
index 2ee78fc..b2b907c 100644
--- a/tools/rbcrun/testdata/file_ops.star
+++ b/tools/rbcrun/testdata/file_ops.star
@@ -1,22 +1,21 @@
 # Tests file ops builtins
 load("assert.star", "assert")
 
-
 def test():
     myname = "file_ops.star"
     files = rblf_wildcard("*.star")
     assert.true(myname in files, "expected %s in  %s" % (myname, files))
-    files = rblf_wildcard("*.star", rblf_env.TEST_DATA_DIR)
+    files = rblf_wildcard("*.star")
     assert.true(myname in files, "expected %s in %s" % (myname, files))
     files = rblf_wildcard("*.xxx")
     assert.true(len(files) == 0, "expansion should be empty but contains %s" % files)
     mydir = "testdata"
     myrelname = "%s/%s" % (mydir, myname)
-    files = rblf_find_files(rblf_env.TEST_DATA_DIR + "/../", "*")
+    files = rblf_find_files("../", "*")
     assert.true(mydir in files and myrelname in files, "expected %s and %s in %s" % (mydir, myrelname, files))
-    files = rblf_find_files(rblf_env.TEST_DATA_DIR + "/../", "*", only_files=1)
+    files = rblf_find_files("../", "*", only_files=1)
     assert.true(mydir not in files, "did not expect %s in %s" % (mydir, files))
     assert.true(myrelname in files, "expected %s  in %s" % (myrelname, files))
-    files = rblf_find_files(rblf_env.TEST_DATA_DIR + "/../", "*.star")
+    files = rblf_find_files("../", "*.star")
     assert.true(myrelname in files, "expected %s in %s" % (myrelname, files))
 test()
diff --git a/tools/rbcrun/testdata/module1.star b/tools/rbcrun/testdata/module1.star
index be04f75..02919a0 100644
--- a/tools/rbcrun/testdata/module1.star
+++ b/tools/rbcrun/testdata/module1.star
@@ -2,6 +2,6 @@
 load("assert.star", "assert")
 
 # Make sure that builtins are defined for the loaded module, too
-assert.true(rblf_wildcard("module1.star"))
-assert.true(not rblf_wildcard("no_such file"))
+assert.true(rblf_wildcard("testdata/module1.star"))
+assert.true(not rblf_wildcard("testdata/no_such file"))
 test = "module1"
diff --git a/tools/rbcrun/testdata/shell.star b/tools/rbcrun/testdata/shell.star
index ad10697..dd17375 100644
--- a/tools/rbcrun/testdata/shell.star
+++ b/tools/rbcrun/testdata/shell.star
@@ -1,5 +1,5 @@
 # Tests "queue" data type
 load("assert.star", "assert")
 
-assert.eq("load.star shell.star", rblf_shell("cd %s && ls -1 shell.star load.star 2>&1" % rblf_env.TEST_DATA_DIR))
-assert.eq("shell.star", rblf_shell("cd %s && echo shell.sta*" % rblf_env.TEST_DATA_DIR))
+assert.eq("load.star shell.star", rblf_shell("ls -1 shell.star load.star 2>&1"))
+assert.eq("shell.star", rblf_shell("echo shell.sta*"))
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 29fc771..a76dc8a 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -334,6 +334,9 @@
         "ota_utils.py",
         "payload_signer.py",
     ],
+    libs: [
+        "releasetools_common",
+    ],
 }
 
 python_binary_host {
@@ -356,6 +359,21 @@
 }
 
 python_binary_host {
+    name: "create_brick_ota",
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    srcs: [
+        "create_brick_ota.py",
+    ],
+    libs: [
+        "ota_utils_lib",
+    ],
+}
+
+python_binary_host {
     name: "build_image",
     defaults: [
         "releasetools_binary_defaults",
@@ -598,6 +616,7 @@
         "testdata/**/*",
         ":com.android.apex.compressed.v1",
         ":com.android.apex.compressed.v1_original",
+        ":com.android.apex.vendor.foo.with_vintf"
     ],
     target: {
         darwin: {
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index e154a0f..465d222 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -65,9 +65,10 @@
 import ota_metadata_pb2
 import rangelib
 import sparse_img
-
+from concurrent.futures import ThreadPoolExecutor
 from apex_utils import GetApexInfoFromTargetFiles
 from common import ZipDelete, PARTITIONS_WITH_CARE_MAP, ExternalError, RunAndCheckOutput, IsSparseImage, MakeTempFile, ZipWrite
+from build_image import FIXED_FILE_TIMESTAMP
 
 if sys.hexversion < 0x02070000:
   print("Python 2.7 or newer is required.", file=sys.stderr)
@@ -81,12 +82,6 @@
 OPTIONS.replace_updated_files_list = []
 OPTIONS.is_signing = False
 
-# Use a fixed timestamp (01/01/2009 00:00:00 UTC) for files when packaging
-# images. (b/24377993, b/80600931)
-FIXED_FILE_TIMESTAMP = int((
-    datetime.datetime(2009, 1, 1, 0, 0, 0, 0, None) -
-    datetime.datetime.utcfromtimestamp(0)).total_seconds())
-
 
 def ParseAvbFooter(img_path) -> avbtool.AvbFooter:
   with open(img_path, 'rb') as fp:
@@ -594,15 +589,6 @@
   if block_list:
     image_props["block_list"] = block_list.name
 
-  # Use repeatable ext4 FS UUID and hash_seed UUID (based on partition name and
-  # build fingerprint). Also use the legacy build id, because the vbmeta digest
-  # isn't available at this point.
-  build_info = common.BuildInfo(info_dict, use_legacy_id=True)
-  uuid_seed = what + "-" + build_info.GetPartitionFingerprint(what)
-  image_props["uuid"] = str(uuid.uuid5(uuid.NAMESPACE_URL, uuid_seed))
-  hash_seed = "hash_seed-" + uuid_seed
-  image_props["hash_seed"] = str(uuid.uuid5(uuid.NAMESPACE_URL, hash_seed))
-
   build_image.BuildImage(
       os.path.join(input_dir, what.upper()), image_props, output_file.name)
 
@@ -818,6 +804,9 @@
   """Create a super_empty.img and store it in output_zip."""
 
   img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "super_empty.img")
+  if os.path.exists(img.name):
+    logger.info("super_empty.img already exists; no need to rebuild...")
+    return
   build_super_image.BuildSuperImage(OPTIONS.info_dict, img.name)
   img.Write()
 
@@ -842,13 +831,14 @@
   SYSTEM/ after rebuilding recovery.
   """
   common.ZipDelete(zip_filename, files_list)
-  with zipfile.ZipFile(zip_filename, "a",
+  output_zip = zipfile.ZipFile(zip_filename, "a",
                                compression=zipfile.ZIP_DEFLATED,
-                               allowZip64=True) as output_zip:
-    for item in files_list:
-      file_path = os.path.join(OPTIONS.input_tmp, item)
-      assert os.path.exists(file_path)
-      common.ZipWrite(output_zip, file_path, arcname=item)
+                               allowZip64=True)
+  for item in files_list:
+    file_path = os.path.join(OPTIONS.input_tmp, item)
+    assert os.path.exists(file_path)
+    common.ZipWrite(output_zip, file_path, arcname=item)
+  common.ZipClose(output_zip)
 
 
 def HasPartition(partition_name):
@@ -1079,8 +1069,15 @@
       ("system_dlkm", has_system_dlkm, AddSystemDlkm, []),
       ("system_other", has_system_other, AddSystemOther, []),
   )
-  for call in add_partition_calls:
-    add_partition(*call)
+  # If output_zip exists, each add_partition_calls writes bytes to the same output_zip,
+  # which is not thread-safe. So, run them in serial if output_zip exists.
+  if output_zip:
+    for call in add_partition_calls:
+      add_partition(*call)
+  else:
+    with ThreadPoolExecutor(max_workers=len(add_partition_calls)) as executor:
+      for future in [executor.submit(add_partition, *call) for call in add_partition_calls]:
+        future.result()
 
   AddApexInfo(output_zip)
 
@@ -1133,14 +1130,18 @@
           item for item in vbmeta_partitions
           if item not in vbmeta_vendor.split()]
       vbmeta_partitions.append("vbmeta_vendor")
-    custom_avb_partitions = OPTIONS.info_dict.get("avb_custom_vbmeta_images_partition_list", "").strip().split()
+    custom_avb_partitions = OPTIONS.info_dict.get(
+        "avb_custom_vbmeta_images_partition_list", "").strip().split()
     if custom_avb_partitions:
       for avb_part in custom_avb_partitions:
         partition_name = "vbmeta_" + avb_part
-        included_partitions = OPTIONS.info_dict.get("avb_vbmeta_{}".format(avb_part), "").strip().split()
-        assert included_partitions, "Custom vbmeta partition {0} missing avb_vbmeta_{0} prop".format(avb_part)
+        included_partitions = OPTIONS.info_dict.get(
+            "avb_vbmeta_{}".format(avb_part), "").strip().split()
+        assert included_partitions, "Custom vbmeta partition {0} missing avb_vbmeta_{0} prop".format(
+            avb_part)
         banner(partition_name)
-        logger.info("VBMeta partition {} needs {}".format(partition_name, included_partitions))
+        logger.info("VBMeta partition {} needs {}".format(
+            partition_name, included_partitions))
         partitions[partition_name] = AddVBMeta(
             output_zip, partitions, partition_name, included_partitions)
         vbmeta_partitions = [
@@ -1148,7 +1149,6 @@
             if item not in included_partitions]
         vbmeta_partitions.append(partition_name)
 
-
     if OPTIONS.info_dict.get("avb_building_vbmeta_image") == "true":
       banner("vbmeta")
       AddVBMeta(output_zip, partitions, "vbmeta", vbmeta_partitions)
@@ -1191,7 +1191,7 @@
   AddVbmetaDigest(output_zip)
 
   if output_zip:
-    output_zip.close()
+    common.ZipClose(output_zip)
     if OPTIONS.replace_updated_files_list:
       ReplaceUpdatedFiles(output_zip.filename,
                           OPTIONS.replace_updated_files_list)
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 40f7c92..59c712e 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -431,7 +431,7 @@
   apex_zip = zipfile.ZipFile(apex_file, 'a', allowZip64=True)
   common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
   common.ZipWrite(apex_zip, payload_public_key, arcname=APEX_PUBKEY)
-  apex_zip.close()
+  common.ZipClose(apex_zip)
 
   # 3. Sign the APEX container with container_key.
   signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
@@ -626,7 +626,7 @@
     if os.path.isfile(deapexer_path):
       deapexer = deapexer_path
 
-  for apex_filename in os.listdir(target_dir):
+  for apex_filename in sorted(os.listdir(target_dir)):
     apex_filepath = os.path.join(target_dir, apex_filename)
     if not os.path.isfile(apex_filepath) or \
             not zipfile.is_zipfile(apex_filepath):
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 9064136..11bd784 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -23,24 +23,34 @@
 """
 
 from __future__ import print_function
+import datetime
 
 import glob
 import logging
 import os
 import os.path
 import re
+import shlex
 import shutil
 import sys
+import uuid
 
 import common
 import verity_utils
 
+
 logger = logging.getLogger(__name__)
 
 OPTIONS = common.OPTIONS
 BLOCK_SIZE = common.BLOCK_SIZE
 BYTES_IN_MB = 1024 * 1024
 
+# Use a fixed timestamp (01/01/2009 00:00:00 UTC) for files when packaging
+# images. (b/24377993, b/80600931)
+FIXED_FILE_TIMESTAMP = int((
+    datetime.datetime(2009, 1, 1, 0, 0, 0, 0, None) -
+    datetime.datetime.utcfromtimestamp(0)).total_seconds())
+
 
 class BuildImageError(Exception):
   """An Exception raised during image building."""
@@ -487,6 +497,20 @@
     raise
 
 
+def SetUUIDIfNotExist(image_props):
+
+  # Use repeatable ext4 FS UUID and hash_seed UUID (based on partition name and
+  # build fingerprint). Also use the legacy build id, because the vbmeta digest
+  # isn't available at this point.
+  what = image_props["mount_point"]
+  fingerprint = image_props.get("fingerprint", "")
+  uuid_seed = what + "-" + fingerprint
+  logger.info("Using fingerprint %s for partition %s", fingerprint, what)
+  image_props["uuid"] = str(uuid.uuid5(uuid.NAMESPACE_URL, uuid_seed))
+  hash_seed = "hash_seed-" + uuid_seed
+  image_props["hash_seed"] = str(uuid.uuid5(uuid.NAMESPACE_URL, hash_seed))
+
+
 def BuildImage(in_dir, prop_dict, out_file, target_out=None):
   """Builds an image for the files under in_dir and writes it to out_file.
 
@@ -504,6 +528,7 @@
     BuildImageError: On build image failures.
   """
   in_dir, fs_config = SetUpInDirAndFsConfig(in_dir, prop_dict)
+  SetUUIDIfNotExist(prop_dict)
 
   build_command = []
   fs_type = prop_dict.get("fs_type", "")
@@ -635,6 +660,19 @@
     verity_image_builder.Build(out_file)
 
 
+def TryParseFingerprint(glob_dict: dict):
+  for (key, val) in glob_dict.items():
+    if not key.endswith("_add_hashtree_footer_args") and not key.endswith("_add_hash_footer_args"):
+      continue
+    for arg in shlex.split(val):
+      m = re.match(r"^com\.android\.build\.\w+\.fingerprint:", arg)
+      if m is None:
+        continue
+      fingerprint = arg[len(m.group()):]
+      glob_dict["fingerprint"] = fingerprint
+      return
+
+
 def ImagePropFromGlobalDict(glob_dict, mount_point):
   """Build an image property dictionary from the global dictionary.
 
@@ -643,7 +681,9 @@
     mount_point: such as "system", "data" etc.
   """
   d = {}
+  TryParseFingerprint(glob_dict)
 
+  d["timestamp"] = FIXED_FILE_TIMESTAMP
   if "build.prop" in glob_dict:
     timestamp = glob_dict["build.prop"].GetProp("ro.build.date.utc")
     if timestamp:
@@ -680,6 +720,7 @@
       "avb_enable",
       "avb_avbtool",
       "use_dynamic_partition_size",
+      "fingerprint",
   )
   for p in common_props:
     copy_prop(p, p)
@@ -870,10 +911,9 @@
           if item not in vbmeta_vendor.split()]
       vbmeta_partitions.append("vbmeta_vendor")
 
-
   partitions = {part: os.path.join(in_dir, part + ".img")
                 for part in vbmeta_partitions}
-  partitions = {part:path for (part, path) in partitions.items() if os.path.exists(path)}
+  partitions = {part: path for (part, path) in partitions.items() if os.path.exists(path)}
   common.BuildVBMeta(output_path, partitions, name, vbmeta_partitions)
 
 
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 97957be..b395c19 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -142,7 +142,7 @@
   """Verifies the payload and metadata signatures in an A/B OTA payload."""
   package_zip = zipfile.ZipFile(package, 'r', allowZip64=True)
   if 'payload.bin' not in package_zip.namelist():
-    package_zip.close()
+    common.ZipClose(package_zip)
     return
 
   print('Verifying A/B OTA payload signatures...')
@@ -160,7 +160,7 @@
          '--in_file=' + payload_file,
          '--public_key=' + pubkey]
   common.RunAndCheckOutput(cmd)
-  package_zip.close()
+  common.ZipClose(package_zip)
 
   # Verified successfully upon reaching here.
   print('\nPayload signatures VERIFIED\n\n')
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 8d16ca0..f92d67c 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -450,10 +450,7 @@
 
   @property
   def is_vabc(self):
-    vendor_prop = self.info_dict.get("vendor.build.prop")
-    vabc_enabled = vendor_prop and \
-        vendor_prop.GetProp("ro.virtual_ab.compression.enabled") == "true"
-    return vabc_enabled
+    return self.info_dict.get("virtual_ab_compression") == "true"
 
   @property
   def is_android_r(self):
@@ -461,6 +458,25 @@
     return system_prop and system_prop.GetProp("ro.build.version.release") == "11"
 
   @property
+  def vendor_api_level(self):
+    vendor_prop = self.info_dict.get("vendor.build.prop")
+    if not vendor_prop:
+      return -1
+
+    props = [
+        "ro.board.api_level",
+        "ro.board.first_api_level",
+        "ro.product.first_api_level",
+    ]
+    for prop in props:
+      value = vendor_prop.GetProp(prop)
+      try:
+        return int(value)
+      except:
+        pass
+    return -1
+
+  @property
   def is_vabc_xor(self):
     vendor_prop = self.info_dict.get("vendor.build.prop")
     vabc_xor_enabled = vendor_prop and \
@@ -698,26 +714,73 @@
       script.AssertOemProperty(prop, values, oem_no_mount)
 
 
-def ReadFromInputFile(input_file, fn):
-  """Reads the contents of fn from input zipfile or directory."""
+def DoesInputFileContain(input_file, fn):
+  """Check whether the input target_files.zip contain an entry `fn`"""
   if isinstance(input_file, zipfile.ZipFile):
-    return input_file.read(fn).decode()
+    return fn in input_file.namelist()
   elif zipfile.is_zipfile(input_file):
     with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
-      return zfp.read(fn).decode()
+      return fn in zfp.namelist()
+  else:
+    if not os.path.isdir(input_file):
+      raise ValueError(
+          "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
+    path = os.path.join(input_file, *fn.split("/"))
+    return os.path.exists(path)
+
+
+def ReadBytesFromInputFile(input_file, fn):
+  """Reads the bytes of fn from input zipfile or directory."""
+  if isinstance(input_file, zipfile.ZipFile):
+    return input_file.read(fn)
+  elif zipfile.is_zipfile(input_file):
+    with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
+      return zfp.read(fn)
   else:
     if not os.path.isdir(input_file):
       raise ValueError(
           "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
     path = os.path.join(input_file, *fn.split("/"))
     try:
-      with open(path) as f:
+      with open(path, "rb") as f:
         return f.read()
     except IOError as e:
       if e.errno == errno.ENOENT:
         raise KeyError(fn)
 
 
+def ReadFromInputFile(input_file, fn):
+  """Reads the str contents of fn from input zipfile or directory."""
+  return ReadBytesFromInputFile(input_file, fn).decode()
+
+
+def WriteBytesToInputFile(input_file, fn, data):
+  """Write bytes |data| contents to fn of input zipfile or directory."""
+  if isinstance(input_file, zipfile.ZipFile):
+    with input_file.open(fn, "w") as entry_fp:
+      return entry_fp.write(data)
+  elif zipfile.is_zipfile(input_file):
+    with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
+      with zfp.open(fn, "w") as entry_fp:
+        return entry_fp.write(data)
+  else:
+    if not os.path.isdir(input_file):
+      raise ValueError(
+          "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
+    path = os.path.join(input_file, *fn.split("/"))
+    try:
+      with open(path, "wb") as f:
+        return f.write(data)
+    except IOError as e:
+      if e.errno == errno.ENOENT:
+        raise KeyError(fn)
+
+
+def WriteToInputFile(input_file, fn, str: str):
+  """Write str content to fn of input file or directory"""
+  return WriteBytesToInputFile(input_file, fn, str.encode())
+
+
 def ExtractFromInputFile(input_file, fn):
   """Extracts the contents of fn from input zipfile or directory into a file."""
   if isinstance(input_file, zipfile.ZipFile):
@@ -865,20 +928,7 @@
         input_file, partition, ramdisk_format=ramdisk_format)
   d["build.prop"] = d["system.build.prop"]
 
-  # Set up the salt (based on fingerprint) that will be used when adding AVB
-  # hash / hashtree footers.
   if d.get("avb_enable") == "true":
-    build_info = BuildInfo(d, use_legacy_id=True)
-    for partition in PARTITIONS_WITH_BUILD_PROP:
-      fingerprint = build_info.GetPartitionFingerprint(partition)
-      if fingerprint:
-        d["avb_{}_salt".format(partition)] = sha256(
-            fingerprint.encode()).hexdigest()
-
-    # Set up the salt for partitions without build.prop
-    if build_info.fingerprint:
-      d["avb_salt"] = sha256(build_info.fingerprint.encode()).hexdigest()
-
     # Set the vbmeta digest if exists
     try:
       d["vbmeta_digest"] = read_helper("META/vbmeta_digest.txt").rstrip()
@@ -1357,11 +1407,8 @@
 def AppendAVBSigningArgs(cmd, partition):
   """Append signing arguments for avbtool."""
   # e.g., "--key path/to/signing_key --algorithm SHA256_RSA4096"
-  key_path = OPTIONS.info_dict.get("avb_" + partition + "_key_path")
-  if key_path and not os.path.exists(key_path) and OPTIONS.search_path:
-    new_key_path = os.path.join(OPTIONS.search_path, key_path)
-    if os.path.exists(new_key_path):
-      key_path = new_key_path
+  key_path = ResolveAVBSigningPathArgs(
+      OPTIONS.info_dict.get("avb_" + partition + "_key_path"))
   algorithm = OPTIONS.info_dict.get("avb_" + partition + "_algorithm")
   if key_path and algorithm:
     cmd.extend(["--key", key_path, "--algorithm", algorithm])
@@ -1371,6 +1418,32 @@
     cmd.extend(["--salt", avb_salt])
 
 
+def ResolveAVBSigningPathArgs(split_args):
+
+  def ResolveBinaryPath(path):
+    if os.path.exists(path):
+      return path
+    new_path = os.path.join(OPTIONS.search_path, path)
+    if os.path.exists(new_path):
+      return new_path
+    raise ExternalError(
+        "Failed to find {}".format(new_path))
+
+  if not split_args:
+    return split_args
+
+  if isinstance(split_args, list):
+    for index, arg in enumerate(split_args[:-1]):
+      if arg == '--signing_helper':
+        signing_helper_path = split_args[index + 1]
+        split_args[index + 1] = ResolveBinaryPath(signing_helper_path)
+        break
+  elif isinstance(split_args, str):
+    split_args = ResolveBinaryPath(split_args)
+
+  return split_args
+
+
 def GetAvbPartitionArg(partition, image, info_dict=None):
   """Returns the VBMeta arguments for partition.
 
@@ -1423,10 +1496,7 @@
   """
   if key is None:
     key = info_dict["avb_" + partition + "_key_path"]
-  if key and not os.path.exists(key) and OPTIONS.search_path:
-    new_key_path = os.path.join(OPTIONS.search_path, key)
-    if os.path.exists(new_key_path):
-      key = new_key_path
+  key = ResolveAVBSigningPathArgs(key)
   pubkey_path = ExtractAvbPublicKey(info_dict["avb_avbtool"], key)
   rollback_index_location = info_dict[
       "avb_" + partition + "_rollback_index_location"]
@@ -1442,10 +1512,7 @@
   key_path = OPTIONS.info_dict.get("gki_signing_key_path")
   algorithm = OPTIONS.info_dict.get("gki_signing_algorithm")
 
-  if not os.path.exists(key_path) and OPTIONS.search_path:
-    new_key_path = os.path.join(OPTIONS.search_path, key_path)
-    if os.path.exists(new_key_path):
-      key_path = new_key_path
+  key_path = ResolveAVBSigningPathArgs(key_path)
 
   # Checks key_path exists, before processing --gki_signing_* args.
   if not os.path.exists(key_path):
@@ -1505,7 +1572,8 @@
 
   custom_partitions = OPTIONS.info_dict.get(
       "avb_custom_images_partition_list", "").strip().split()
-  custom_avb_partitions = ["vbmeta_" + part for part in OPTIONS.info_dict.get("avb_custom_vbmeta_images_partition_list", "").strip().split()]
+  custom_avb_partitions = ["vbmeta_" + part for part in OPTIONS.info_dict.get(
+      "avb_custom_vbmeta_images_partition_list", "").strip().split()]
 
   for partition, path in partitions.items():
     if partition not in needed_partitions:
@@ -1541,6 +1609,8 @@
             found = True
             break
         assert found, 'Failed to find {}'.format(chained_image)
+
+    split_args = ResolveAVBSigningPathArgs(split_args)
     cmd.extend(split_args)
 
   RunAndCheckOutput(cmd)
@@ -1751,7 +1821,8 @@
     AppendAVBSigningArgs(cmd, partition_name)
     args = info_dict.get("avb_" + partition_name + "_add_hash_footer_args")
     if args and args.strip():
-      cmd.extend(shlex.split(args))
+      split_args = ResolveAVBSigningPathArgs(shlex.split(args))
+      cmd.extend(split_args)
     RunAndCheckOutput(cmd)
 
   img.seek(os.SEEK_SET, 0)
@@ -1792,7 +1863,8 @@
     AppendAVBSigningArgs(cmd, partition_name)
     args = info_dict.get("avb_" + partition_name + "_add_hash_footer_args")
     if args and args.strip():
-      cmd.extend(shlex.split(args))
+      split_args = ResolveAVBSigningPathArgs(shlex.split(args))
+      cmd.extend(split_args)
     RunAndCheckOutput(cmd)
 
 
@@ -1867,7 +1939,7 @@
   data = _BuildBootableImage(prebuilt_name, os.path.join(unpack_dir, tree_subdir),
                              os.path.join(unpack_dir, fs_config),
                              os.path.join(unpack_dir, 'META/ramdisk_node_list')
-                                if dev_nodes else None,
+                             if dev_nodes else None,
                              info_dict, has_ramdisk, two_step_image)
   if data:
     return File(name, data)
@@ -1972,7 +2044,8 @@
     AppendAVBSigningArgs(cmd, partition_name)
     args = info_dict.get(f'avb_{partition_name}_add_hash_footer_args')
     if args and args.strip():
-      cmd.extend(shlex.split(args))
+      split_args = ResolveAVBSigningPathArgs(shlex.split(args))
+      cmd.extend(split_args)
     RunAndCheckOutput(cmd)
 
   img.seek(os.SEEK_SET, 0)
@@ -2051,20 +2124,19 @@
         archvie. Non-matching patterns will be filtered out. If there's no match
         after the filtering, no file will be unzipped.
   """
-  cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
-  if patterns is not None:
+  with zipfile.ZipFile(filename, allowZip64=True, mode="r") as input_zip:
     # Filter out non-matching patterns. unzip will complain otherwise.
-    with zipfile.ZipFile(filename, allowZip64=True) as input_zip:
+    if patterns is not None:
       names = input_zip.namelist()
-    filtered = [
-        pattern for pattern in patterns if fnmatch.filter(names, pattern)]
+      filtered = [name for name in names if any(
+          [fnmatch.fnmatch(name, p) for p in patterns])]
 
-    # There isn't any matching files. Don't unzip anything.
-    if not filtered:
-      return
-    cmd.extend(filtered)
-
-  RunAndCheckOutput(cmd)
+      # There aren't any matching files. Don't unzip anything.
+      if not filtered:
+        return
+      input_zip.extractall(dirname, filtered)
+    else:
+      input_zip.extractall(dirname)
 
 
 def UnzipTemp(filename, patterns=None):
@@ -2121,9 +2193,7 @@
   if info_dict is None:
     info_dict = LoadInfoDict(input_zip)
 
-  is_sparse = info_dict.get("extfs_sparse_flag")
-  if info_dict.get(which + "_disable_sparse"):
-    is_sparse = False
+  is_sparse = IsSparseImage(os.path.join(tmpdir, "IMAGES", which + ".img"))
 
   # When target uses 'BOARD_EXT4_SHARE_DUP_BLOCKS := true', images may contain
   # shared blocks (i.e. some blocks will show up in multiple files' block
@@ -2698,6 +2768,8 @@
 
 def Cleanup():
   for i in OPTIONS.tempfiles:
+    if not os.path.exists(i):
+      continue
     if os.path.isdir(i):
       shutil.rmtree(i, ignore_errors=True)
     else:
@@ -2811,6 +2883,18 @@
 def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
              compress_type=None):
 
+  # http://b/18015246
+  # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
+  # for files larger than 2GiB. We can work around this by adjusting their
+  # limit. Note that `zipfile.writestr()` will not work for strings larger than
+  # 2GiB. The Python interpreter sometimes rejects strings that large (though
+  # it isn't clear to me exactly what circumstances cause this).
+  # `zipfile.write()` must be used directly to work around this.
+  #
+  # This mess can be avoided if we port to python3.
+  saved_zip64_limit = zipfile.ZIP64_LIMIT
+  zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
   if compress_type is None:
     compress_type = zip_file.compression
   if arcname is None:
@@ -2836,13 +2920,14 @@
   finally:
     os.chmod(filename, saved_stat.st_mode)
     os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
+    zipfile.ZIP64_LIMIT = saved_zip64_limit
 
 
 def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
                 compress_type=None):
   """Wrap zipfile.writestr() function to work around the zip64 limit.
 
-  Python's zip implementation won't allow writing a string
+  Even with the ZIP64_LIMIT workaround, it won't allow writing a string
   longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
   when calling crc32(bytes).
 
@@ -2851,6 +2936,9 @@
   when we know the string won't be too long.
   """
 
+  saved_zip64_limit = zipfile.ZIP64_LIMIT
+  zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
   if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
     zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
     zinfo.compress_type = zip_file.compression
@@ -2883,6 +2971,7 @@
   zinfo.date_time = (2009, 1, 1, 0, 0, 0)
 
   zip_file.writestr(zinfo, data)
+  zipfile.ZIP64_LIMIT = saved_zip64_limit
 
 
 def ZipDelete(zip_filename, entries, force=False):
@@ -2911,10 +3000,21 @@
       cmd.append(entry)
     RunAndCheckOutput(cmd)
 
-
   os.replace(new_zipfile, zip_filename)
 
 
+def ZipClose(zip_file):
+  # http://b/18015246
+  # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
+  # central directory.
+  saved_zip64_limit = zipfile.ZIP64_LIMIT
+  zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
+  zip_file.close()
+
+  zipfile.ZIP64_LIMIT = saved_zip64_limit
+
+
 class DeviceSpecificParams(object):
   module = None
 
@@ -3607,11 +3707,13 @@
 
   else:
     system_root_image = info_dict.get("system_root_image") == "true"
+    include_recovery_dtbo = info_dict.get("include_recovery_dtbo") == "true"
+    include_recovery_acpio = info_dict.get("include_recovery_acpio") == "true"
     path = os.path.join(input_dir, recovery_resource_dat_path)
     # With system-root-image, boot and recovery images will have mismatching
     # entries (only recovery has the ramdisk entry) (Bug: 72731506). Use bsdiff
     # to handle such a case.
-    if system_root_image:
+    if system_root_image or include_recovery_dtbo or include_recovery_acpio:
       diff_program = ["bsdiff"]
       bonus_args = ""
       assert not os.path.exists(path)
@@ -4001,3 +4103,38 @@
     # Magic for android sparse image format
     # https://source.android.com/devices/bootloader/images
     return fp.read(4) == b'\x3A\xFF\x26\xED'
+
+
+def UnsparseImage(filepath, target_path=None):
+  if not IsSparseImage(filepath):
+    return
+  if target_path is None:
+    tmp_img = MakeTempFile(suffix=".img")
+    RunAndCheckOutput(["simg2img", filepath, tmp_img])
+    os.rename(tmp_img, filepath)
+  else:
+    RunAndCheckOutput(["simg2img", filepath, target_path])
+
+
+def ParseUpdateEngineConfig(path: str):
+  """Parse the update_engine config stored in file `path`
+  Args
+    path: Path to update_engine_config.txt file in target_files
+
+  Returns
+    A tuple of (major, minor) version number. E.g. (2, 8)
+  """
+  with open(path, "r") as fp:
+    # update_engine_config.txt is only supposed to contain two lines,
+    # PAYLOAD_MAJOR_VERSION and PAYLOAD_MINOR_VERSION. 1024 should be more than
+    # sufficient. If the length is more than that, something is wrong.
+    data = fp.read(1024)
+    major = re.search(r"PAYLOAD_MAJOR_VERSION=(\d+)", data)
+    if not major:
+      raise ValueError(
+          f"{path} is an invalid update_engine config, missing PAYLOAD_MAJOR_VERSION {data}")
+    minor = re.search(r"PAYLOAD_MINOR_VERSION=(\d+)", data)
+    if not minor:
+      raise ValueError(
+          f"{path} is an invalid update_engine config, missing PAYLOAD_MINOR_VERSION {data}")
+    return (int(major.group(1)), int(minor.group(1)))
diff --git a/tools/releasetools/create_brick_ota.py b/tools/releasetools/create_brick_ota.py
new file mode 100644
index 0000000..44f0a95
--- /dev/null
+++ b/tools/releasetools/create_brick_ota.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+from pathlib import Path
+import zipfile
+from typing import List
+import common
+import tempfile
+import shutil
+
+PARTITIONS_TO_WIPE = ["/dev/block/by-name/vbmeta",
+                      "/dev/block/by-name/vbmeta_a",
+                      "/dev/block/by-name/vbmeta_b",
+                      "/dev/block/by-name/vbmeta_system_a",
+                      "/dev/block/by-name/vbmeta_system_b",
+                      "/dev/block/by-name/boot",
+                      "/dev/block/by-name/boot_a",
+                      "/dev/block/by-name/boot_b",
+                      "/dev/block/by-name/vendor_boot",
+                      "/dev/block/by-name/vendor_boot_a",
+                      "/dev/block/by-name/vendor_boot_b",
+                      "/dev/block/by-name/init_boot_a",
+                      "/dev/block/by-name/init_boot_b",
+                      "/dev/block/by-name/metadata",
+                      "/dev/block/by-name/super",
+                      "/dev/block/by-name/userdata"]
+
+
+def CreateBrickOta(product_name: str, output_path: Path, extra_wipe_partitions: str, serialno: str):
+  partitions_to_wipe = PARTITIONS_TO_WIPE
+  if extra_wipe_partitions is not None:
+    partitions_to_wipe = PARTITIONS_TO_WIPE + extra_wipe_partitions.split(",")
+  # recovery requires product name to be a | separated list
+  product_name = product_name.replace(",", "|")
+  with zipfile.ZipFile(output_path, "w") as zfp:
+    zfp.writestr("recovery.wipe", "\n".join(partitions_to_wipe))
+    zfp.writestr("payload.bin", "")
+    zfp.writestr("META-INF/com/android/metadata", "\n".join(
+        ["ota-type=BRICK", "post-timestamp=9999999999", "pre-device=" + product_name, "serialno=" + serialno]))
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(description='Android Brick OTA generator')
+  parser.add_argument('otafile', metavar='PAYLOAD', type=str,
+                      help='The output OTA package file.')
+  parser.add_argument('--product', type=str,
+                      help='The product name of the device, for example, bramble, redfin. This can be a comma separated list.', required=True)
+  parser.add_argument('--serialno', type=str,
+                      help='The serial number of devices that are allowed to install this OTA package. This can be a comma separated list.')
+  parser.add_argument('--extra_wipe_partitions', type=str,
+                      help='Additional partitions on device which should be wiped.')
+  parser.add_argument('-v', action="store_true",
+                      help="Enable verbose logging", dest="verbose")
+  parser.add_argument('--package_key', type=str,
+                      help='Paths to private key for signing payload')
+  parser.add_argument('--search_path', type=str,
+                      help='Search path for framework/signapk.jar')
+  parser.add_argument('--private_key_suffix', type=str,
+                      help='Suffix to be appended to package_key path', default=".pk8")
+  args = parser.parse_args(argv[1:])
+  if args.search_path:
+    common.OPTIONS.search_path = args.search_path
+  if args.verbose:
+    common.OPTIONS.verbose = args.verbose
+  CreateBrickOta(args.product, args.otafile,
+                 args.extra_wipe_partitions, args.serialno)
+  if args.package_key:
+    common.OPTIONS.private_key_suffix = args.private_key_suffix
+    with tempfile.NamedTemporaryFile() as tmpfile:
+      common.SignFile(args.otafile, tmpfile.name,
+                      args.package_key, None, whole_file=True)
+      shutil.copy(tmpfile.name, args.otafile)
+
+
+if __name__ == "__main__":
+  import sys
+  main(sys.argv)
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index f8bdd81..fa53ad2 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -64,7 +64,7 @@
 OPTIONS.retrofit_dap = None
 OPTIONS.build_super = None
 OPTIONS.sparse_userimages = None
-
+OPTIONS.use_fastboot_info = False
 
 def LoadOptions(input_file):
   """Loads information from input_file to OPTIONS.
@@ -119,12 +119,18 @@
   entries = [
       'OTA/android-info.txt:android-info.txt',
   ]
+  if OPTIONS.use_fastboot_info:
+    entries.append('META/fastboot-info.txt:fastboot-info.txt')
   with zipfile.ZipFile(input_file) as input_zip:
     namelist = input_zip.namelist()
+  if 'PREBUILT_IMAGES/kernel_16k' in namelist:
+    entries.append('PREBUILT_IMAGES/kernel_16k:kernel_16k')
+  if 'PREBUILT_IMAGES/ramdisk_16k.img' in namelist:
+    entries.append('PREBUILT_IMAGES/ramdisk_16k.img:ramdisk_16k.img')
 
   for image_path in [name for name in namelist if name.startswith('IMAGES/')]:
     image = os.path.basename(image_path)
-    if OPTIONS.bootable_only and image not in('boot.img', 'recovery.img', 'bootloader', 'init_boot.img'):
+    if OPTIONS.bootable_only and image not in ('boot.img', 'recovery.img', 'bootloader', 'init_boot.img'):
       continue
     if not image.endswith('.img') and image != 'bootloader':
       continue
@@ -172,8 +178,8 @@
 
   logger.info('Writing super.img to archive...')
   with zipfile.ZipFile(
-      output_file, 'a', compression=zipfile.ZIP_DEFLATED,
-      allowZip64=True) as output_zip:
+          output_file, 'a', compression=zipfile.ZIP_DEFLATED,
+          allowZip64=True) as output_zip:
     common.ZipWrite(output_zip, super_file, 'super.img')
 
 
diff --git a/tools/releasetools/merge/merge_meta.py b/tools/releasetools/merge/merge_meta.py
index 3288ef7..b61f039 100644
--- a/tools/releasetools/merge/merge_meta.py
+++ b/tools/releasetools/merge/merge_meta.py
@@ -29,6 +29,7 @@
 import merge_utils
 import sparse_img
 import verity_utils
+from ota_utils import ParseUpdateEngineConfig
 
 from common import ExternalError
 
@@ -52,28 +53,6 @@
 MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')
 
 
-def ParseUpdateEngineConfig(path: str):
-  """Parse the update_engine config stored in file `path`
-  Args
-    path: Path to update_engine_config.txt file in target_files
-
-  Returns
-    A tuple of (major, minor) version number . E.g. (2, 8)
-  """
-  with open(path, "r") as fp:
-    # update_engine_config.txt is only supposed to contain two lines,
-    # PAYLOAD_MAJOR_VERSION and PAYLOAD_MINOR_VERSION. 1024 should be more than
-    # sufficient. If the length is more than that, something is wrong.
-    data = fp.read(1024)
-    major = re.search(r"PAYLOAD_MAJOR_VERSION=(\d+)", data)
-    if not major:
-      raise ValueError(
-          f"{path} is an invalid update_engine config, missing PAYLOAD_MAJOR_VERSION {data}")
-    minor = re.search(r"PAYLOAD_MINOR_VERSION=(\d+)", data)
-    if not minor:
-      raise ValueError(
-          f"{path} is an invalid update_engine config, missing PAYLOAD_MINOR_VERSION {data}")
-    return (int(major.group(1)), int(minor.group(1)))
 
 
 def MergeUpdateEngineConfig(input_metadir1, input_metadir2, merged_meta_dir):
@@ -99,16 +78,16 @@
   """Merges various files in META/*."""
 
   framework_meta_dir = os.path.join(temp_dir, 'framework_meta', 'META')
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.framework_target_files,
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.framework_target_files,
       output_dir=os.path.dirname(framework_meta_dir),
-      extract_item_list=('META/*',))
+      item_list=('META/*',))
 
   vendor_meta_dir = os.path.join(temp_dir, 'vendor_meta', 'META')
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.vendor_target_files,
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.vendor_target_files,
       output_dir=os.path.dirname(vendor_meta_dir),
-      extract_item_list=('META/*',))
+      item_list=('META/*',))
 
   merged_meta_dir = os.path.join(merged_dir, 'META')
 
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index 54122b0..d8f7b15 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -26,9 +26,9 @@
 
 Usage: merge_target_files [args]
 
-  --framework-target-files framework-target-files-zip-archive
+  --framework-target-files framework-target-files-package
       The input target files package containing framework bits. This is a zip
-      archive.
+      archive or a directory.
 
   --framework-item-list framework-item-list-file
       The optional path to a newline-separated config file of items that
@@ -38,9 +38,9 @@
       The optional path to a newline-separated config file of keys to
       extract from the framework META/misc_info.txt file.
 
-  --vendor-target-files vendor-target-files-zip-archive
+  --vendor-target-files vendor-target-files-package
       The input target files package containing vendor bits. This is a zip
-      archive.
+      archive or a directory.
 
   --vendor-item-list vendor-item-list-file
       The optional path to a newline-separated config file of items that
@@ -165,6 +165,26 @@
     pass
 
 
+def include_extra_in_list(item_list):
+  """
+  1. Include all `META/*` files in the item list.
+
+  To ensure that `AddImagesToTargetFiles` can still be used with vendor item
+  list that do not specify all of the required META/ files, those files should
+  be included by default. This preserves the backward compatibility of
+  `rebuild_image_with_sepolicy`.
+
+  2. Include `SYSTEM/build.prop` file in the item list.
+
+  To ensure that `AddImagesToTargetFiles` for GRF vendor images, can still
+  access SYSTEM/build.prop to pass GetPartitionFingerprint check in BuildInfo
+  constructor.
+  """
+  if not item_list:
+    return None
+  return list(item_list) + ['META/*'] + ['SYSTEM/build.prop']
+
+
 def create_merged_package(temp_dir):
   """Merges two target files packages into one target files structure.
 
@@ -172,18 +192,18 @@
     Path to merged package under temp directory.
   """
   # Extract "as is" items from the input framework and vendor partial target
-  # files packages directly into the output temporary directory, since these items
-  # do not need special case processing.
+  # files packages directly into the output temporary directory, since these
+  # items do not need special case processing.
 
   output_target_files_temp_dir = os.path.join(temp_dir, 'output')
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.framework_target_files,
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.framework_target_files,
       output_dir=output_target_files_temp_dir,
-      extract_item_list=OPTIONS.framework_item_list)
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.vendor_target_files,
+      item_list=OPTIONS.framework_item_list)
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.vendor_target_files,
       output_dir=output_target_files_temp_dir,
-      extract_item_list=OPTIONS.vendor_item_list)
+      item_list=OPTIONS.vendor_item_list)
 
   # Perform special case processing on META/* items.
   # After this function completes successfully, all the files we need to create
@@ -231,7 +251,8 @@
   def copy_selinux_file(input_path, output_filename):
     input_filename = os.path.join(target_files_dir, input_path)
     if not os.path.exists(input_filename):
-      input_filename = input_filename.replace('SYSTEM_EXT/', 'SYSTEM/system_ext/') \
+      input_filename = input_filename.replace('SYSTEM_EXT/',
+                                              'SYSTEM/system_ext/') \
           .replace('PRODUCT/', 'SYSTEM/product/')
       if not os.path.exists(input_filename):
         logger.info('Skipping copy_selinux_file for %s', input_filename)
@@ -272,7 +293,10 @@
   vendor_target_files_dir = common.MakeTempDir(
       prefix='merge_target_files_vendor_target_files_')
   common.UnzipToDir(OPTIONS.vendor_otatools, vendor_otatools_dir)
-  common.UnzipToDir(OPTIONS.vendor_target_files, vendor_target_files_dir)
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.vendor_target_files,
+      output_dir=vendor_target_files_dir,
+      item_list=include_extra_in_list(OPTIONS.vendor_item_list))
 
   # Copy the partition contents from the merged target-files archive to the
   # vendor target-files archive.
@@ -303,8 +327,9 @@
   shutil.move(
       os.path.join(vendor_target_files_dir, 'IMAGES', partition_img),
       os.path.join(target_files_dir, 'IMAGES', partition_img))
-  move_only_exists(os.path.join(vendor_target_files_dir, 'IMAGES', partition_map),
-        os.path.join(target_files_dir, 'IMAGES', partition_map))
+  move_only_exists(
+      os.path.join(vendor_target_files_dir, 'IMAGES', partition_map),
+      os.path.join(target_files_dir, 'IMAGES', partition_map))
 
   def copy_recovery_file(filename):
     for subdir in ('VENDOR', 'SYSTEM/vendor'):
@@ -578,10 +603,10 @@
     common.Usage(__doc__)
     sys.exit(1)
 
-  with zipfile.ZipFile(OPTIONS.framework_target_files, allowZip64=True) as fz:
-    framework_namelist = fz.namelist()
-  with zipfile.ZipFile(OPTIONS.vendor_target_files, allowZip64=True) as vz:
-    vendor_namelist = vz.namelist()
+  framework_namelist = merge_utils.GetTargetFilesItems(
+      OPTIONS.framework_target_files)
+  vendor_namelist = merge_utils.GetTargetFilesItems(
+      OPTIONS.vendor_target_files)
 
   if OPTIONS.framework_item_list:
     OPTIONS.framework_item_list = common.LoadListFromFile(
diff --git a/tools/releasetools/merge/merge_utils.py b/tools/releasetools/merge/merge_utils.py
index e056195..b5683a8 100644
--- a/tools/releasetools/merge/merge_utils.py
+++ b/tools/releasetools/merge/merge_utils.py
@@ -49,28 +49,80 @@
   common.UnzipToDir(input_zip, output_dir, filtered_extract_item_list)
 
 
-def CopyItems(from_dir, to_dir, patterns):
-  """Similar to ExtractItems() except uses an input dir instead of zip."""
-  file_paths = []
-  for dirpath, _, filenames in os.walk(from_dir):
-    file_paths.extend(
-        os.path.relpath(path=os.path.join(dirpath, filename), start=from_dir)
-        for filename in filenames)
+def CopyItems(from_dir, to_dir, copy_item_list):
+  """Copies the items in copy_item_list from source to destination directory.
 
-  filtered_file_paths = set()
-  for pattern in patterns:
-    filtered_file_paths.update(fnmatch.filter(file_paths, pattern))
+  copy_item_list may include files and directories. Will copy the matched
+  files and create the matched directories.
 
-  for file_path in filtered_file_paths:
-    original_file_path = os.path.join(from_dir, file_path)
-    copied_file_path = os.path.join(to_dir, file_path)
-    copied_file_dir = os.path.dirname(copied_file_path)
-    if not os.path.exists(copied_file_dir):
-      os.makedirs(copied_file_dir)
-    if os.path.islink(original_file_path):
-      os.symlink(os.readlink(original_file_path), copied_file_path)
+  Args:
+    from_dir: The source directory.
+    to_dir: The destination directory.
+    copy_item_list: Items to be copied.
+  """
+  item_paths = []
+  for root, dirs, files in os.walk(from_dir):
+    item_paths.extend(
+        os.path.relpath(path=os.path.join(root, item_name), start=from_dir)
+        for item_name in files + dirs)
+
+  filtered = set()
+  for pattern in copy_item_list:
+    filtered.update(fnmatch.filter(item_paths, pattern))
+
+  for item in filtered:
+    original_path = os.path.join(from_dir, item)
+    copied_path = os.path.join(to_dir, item)
+    copied_parent_path = os.path.dirname(copied_path)
+    if not os.path.exists(copied_parent_path):
+      os.makedirs(copied_parent_path)
+    if os.path.islink(original_path):
+      os.symlink(os.readlink(original_path), copied_path)
+    elif os.path.isdir(original_path):
+      if not os.path.exists(copied_path):
+        os.makedirs(copied_path)
     else:
-      shutil.copyfile(original_file_path, copied_file_path)
+      shutil.copyfile(original_path, copied_path)
+
+
+def GetTargetFilesItems(target_files_zipfile_or_dir):
+  """Gets a list of target files items."""
+  if zipfile.is_zipfile(target_files_zipfile_or_dir):
+    with zipfile.ZipFile(target_files_zipfile_or_dir, allowZip64=True) as fz:
+      return fz.namelist()
+  elif os.path.isdir(target_files_zipfile_or_dir):
+    item_list = []
+    for root, dirs, files in os.walk(target_files_zipfile_or_dir):
+      item_list.extend(
+          os.path.relpath(path=os.path.join(root, item),
+                          start=target_files_zipfile_or_dir)
+          for item in dirs + files)
+    return item_list
+  else:
+    raise ValueError('Target files should be either zipfile or directory.')
+
+
+def CollectTargetFiles(input_zipfile_or_dir, output_dir, item_list=None):
+  """Extracts input zipfile or copy input directory to output directory.
+
+  Extracts the input zipfile if `input_zipfile_or_dir` is a zip archive, or
+  copies the items if `input_zipfile_or_dir` is a directory.
+
+  Args:
+    input_zipfile_or_dir: The input target files, could be either a zipfile to
+      extract or a directory to copy.
+    output_dir: The output directory that the input files are either extracted
+      or copied.
+    item_list: Files to be extracted or copied. Will extract or copy all files
+      if omitted.
+  """
+  patterns = item_list if item_list else ('*',)
+  if zipfile.is_zipfile(input_zipfile_or_dir):
+    ExtractItems(input_zipfile_or_dir, output_dir, patterns)
+  elif os.path.isdir(input_zipfile_or_dir):
+    CopyItems(input_zipfile_or_dir, output_dir, patterns)
+  else:
+    raise ValueError('Target files should be either zipfile or directory.')
 
 
 def WriteSortedData(data, path):
@@ -129,6 +181,7 @@
 
 _PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/.*$')
 _IMAGE_PARTITION_PATTERN = re.compile(r'^IMAGES/(.*)\.img$')
+_PREBUILT_IMAGE_PARTITION_PATTERN = re.compile(r'^PREBUILT_IMAGES/(.*)\.img$')
 
 
 def ItemListToPartitionSet(item_list):
@@ -151,12 +204,12 @@
   partition_set = set()
 
   for item in item_list:
-    for pattern in (_PARTITION_ITEM_PATTERN, _IMAGE_PARTITION_PATTERN):
+    for pattern in (_PARTITION_ITEM_PATTERN, _IMAGE_PARTITION_PATTERN, _PREBUILT_IMAGE_PARTITION_PATTERN):
       partition_match = pattern.search(item.strip())
       if partition_match:
         partition = partition_match.group(1).lower()
         # These directories in target-files are not actual partitions.
-        if partition not in ('meta', 'images'):
+        if partition not in ('meta', 'images', 'prebuilt_images'):
           partition_set.add(partition)
 
   return partition_set
@@ -165,7 +218,7 @@
 # Partitions that are grabbed from the framework partial build by default.
 _FRAMEWORK_PARTITIONS = {
     'system', 'product', 'system_ext', 'system_other', 'root', 'system_dlkm',
-    'vbmeta_system'
+    'vbmeta_system', 'pvmfw'
 }
 
 
@@ -201,7 +254,7 @@
     if partition == 'meta':
       continue
 
-    if partition == 'images':
+    if partition in ('images', 'prebuilt_images'):
       image_partition, extension = os.path.splitext(os.path.basename(namelist))
       if image_partition == 'vbmeta':
         # Always regenerate vbmeta.img since it depends on hash information
diff --git a/tools/releasetools/merge/test_merge_utils.py b/tools/releasetools/merge/test_merge_utils.py
index 1ae1f54..b4c47ae 100644
--- a/tools/releasetools/merge/test_merge_utils.py
+++ b/tools/releasetools/merge/test_merge_utils.py
@@ -35,22 +35,27 @@
       open(path, 'a').close()
       return path
 
+    def createEmptyFolder(path):
+      os.makedirs(path)
+      return path
+
     def createSymLink(source, dest):
       os.symlink(source, dest)
       return dest
 
     def getRelPaths(start, filepaths):
       return set(
-          os.path.relpath(path=filepath, start=start) for filepath in filepaths)
+          os.path.relpath(path=filepath, start=start)
+          for filepath in filepaths)
 
     input_dir = common.MakeTempDir()
     output_dir = common.MakeTempDir()
     expected_copied_items = []
     actual_copied_items = []
-    patterns = ['*.cpp', 'subdir/*.txt']
+    patterns = ['*.cpp', 'subdir/*.txt', 'subdir/empty_dir']
 
-    # Create various files that we expect to get copied because they
-    # match one of the patterns.
+    # Create various files and empty directories that we expect to get copied
+    # because they match one of the patterns.
     expected_copied_items.extend([
         createEmptyFile(os.path.join(input_dir, 'a.cpp')),
         createEmptyFile(os.path.join(input_dir, 'b.cpp')),
@@ -58,6 +63,7 @@
         createEmptyFile(os.path.join(input_dir, 'subdir', 'd.txt')),
         createEmptyFile(
             os.path.join(input_dir, 'subdir', 'subsubdir', 'e.txt')),
+        createEmptyFolder(os.path.join(input_dir, 'subdir', 'empty_dir')),
         createSymLink('a.cpp', os.path.join(input_dir, 'a_link.cpp')),
     ])
     # Create some more files that we expect to not get copied.
@@ -70,9 +76,13 @@
     merge_utils.CopyItems(input_dir, output_dir, patterns)
 
     # Assert the actual copied items match the ones we expected.
-    for dirpath, _, filenames in os.walk(output_dir):
+    for root_dir, dirs, files in os.walk(output_dir):
       actual_copied_items.extend(
-          os.path.join(dirpath, filename) for filename in filenames)
+          os.path.join(root_dir, filename) for filename in files)
+      for dirname in dirs:
+        dir_path = os.path.join(root_dir, dirname)
+        if not os.listdir(dir_path):
+          actual_copied_items.append(dir_path)
     self.assertEqual(
         getRelPaths(output_dir, actual_copied_items),
         getRelPaths(input_dir, expected_copied_items))
diff --git a/tools/releasetools/merge_ota.py b/tools/releasetools/merge_ota.py
index 7d3d3a3..441312c 100644
--- a/tools/releasetools/merge_ota.py
+++ b/tools/releasetools/merge_ota.py
@@ -14,6 +14,7 @@
 
 import argparse
 import logging
+import shlex
 import struct
 import sys
 import update_payload
@@ -34,6 +35,7 @@
 logger = logging.getLogger(__name__)
 
 CARE_MAP_ENTRY = "care_map.pb"
+APEX_INFO_ENTRY = "apex_info.pb"
 
 
 def WriteDataBlob(payload: Payload, outfp: BinaryIO, read_size=1024*64):
@@ -188,6 +190,22 @@
               f"OTA {partition_to_ota[part].name} and {payload.name} have duplicating partition {part}")
         partition_to_ota[part] = payload
 
+def ApexInfo(file_paths):
+  if len(file_paths) > 1:
+    logger.info("More than one target file specified, will ignore "
+                "apex_info.pb (if any)")
+    return None
+  with zipfile.ZipFile(file_paths[0], "r", allowZip64=True) as zfp:
+    if APEX_INFO_ENTRY in zfp.namelist():
+      apex_info_bytes = zfp.read(APEX_INFO_ENTRY)
+      return apex_info_bytes
+  return None
+
+def ParseSignerArgs(args):
+  if args is None:
+    return None
+  return shlex.split(args)
+
 def main(argv):
   parser = argparse.ArgumentParser(description='Merge multiple partial OTAs')
   parser.add_argument('packages', type=str, nargs='+',
@@ -196,6 +214,13 @@
                       help='Paths to private key for signing payload')
   parser.add_argument('--search_path', type=str,
                       help='Search path for framework/signapk.jar')
+  parser.add_argument('--payload_signer', type=str,
+                      help='Path to custom payload signer')
+  parser.add_argument('--payload_signer_args', type=ParseSignerArgs,
+                      help='Arguments for payload signer if necessary')
+  parser.add_argument('--payload_signer_maximum_signature_size', type=str,
+                      help='Maximum signature size (in bytes) that would be '
+                      'generated by the given payload signer')
   parser.add_argument('--output', type=str,
                       help='Paths to output merged ota', required=True)
   parser.add_argument('--metadata_ota', type=str,
@@ -203,6 +228,9 @@
   parser.add_argument('--private_key_suffix', type=str,
                       help='Suffix to be appended to package_key path', default=".pk8")
   parser.add_argument('-v', action="store_true", help="Enable verbose logging", dest="verbose")
+  parser.epilog = ('This tool can also be used to resign a regular OTA. For a single regular OTA, '
+                   'apex_info.pb will be written to output. When merging multiple OTAs, '
+                   'apex_info.pb will not be written.')
   args = parser.parse_args(argv[1:])
   file_paths = args.packages
 
@@ -225,6 +253,13 @@
 
   merged_manifest = MergeManifests(payloads)
 
+  # Get signing keys
+  key_passwords = common.GetKeyPasswords([args.package_key])
+
+  generator = PayloadGenerator()
+
+  apex_info_bytes = ApexInfo(file_paths)
+
   with tempfile.NamedTemporaryFile() as unsigned_payload:
     WriteHeaderAndManifest(merged_manifest, unsigned_payload)
     ConcatBlobs(payloads, unsigned_payload)
@@ -236,20 +271,31 @@
 
     if args.package_key:
       logger.info("Signing payload...")
-      signer = PayloadSigner(args.package_key, args.private_key_suffix)
+      # TODO: remove OPTIONS when no longer used as fallback in payload_signer
+      common.OPTIONS.payload_signer_args = None
+      common.OPTIONS.payload_signer_maximum_signature_size = None
+      signer = PayloadSigner(args.package_key, args.private_key_suffix,
+                             key_passwords[args.package_key],
+                             payload_signer=args.payload_signer,
+                             payload_signer_args=args.payload_signer_args,
+                             payload_signer_maximum_signature_size=args.payload_signer_maximum_signature_size)
       generator.payload_file = unsigned_payload.name
       generator.Sign(signer)
 
     logger.info("Payload size: %d", os.path.getsize(generator.payload_file))
 
     logger.info("Writing to %s", args.output)
+
     key_passwords = common.GetKeyPasswords([args.package_key])
     with tempfile.NamedTemporaryFile(prefix="signed_ota", suffix=".zip") as signed_ota:
       with zipfile.ZipFile(signed_ota, "w") as zfp:
         generator.WriteToZip(zfp)
         care_map_bytes = MergeCareMap(args.packages)
         if care_map_bytes:
-          zfp.writestr(CARE_MAP_ENTRY, care_map_bytes)
+          common.ZipWriteStr(zfp, CARE_MAP_ENTRY, care_map_bytes)
+        if apex_info_bytes:
+          logger.info("Writing %s", APEX_INFO_ENTRY)
+          common.ZipWriteStr(zfp, APEX_INFO_ENTRY, apex_info_bytes)
       AddOtaMetadata(signed_ota.name, metadata_ota,
                      args.output, args.package_key, key_passwords[args.package_key])
   return 0
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 7078d67..667891c 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -23,6 +23,7 @@
 from check_target_files_vintf import CheckVintfIfTrebleEnabled, HasPartition
 from common import OPTIONS
 from ota_utils import UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata, PropertyFiles
+import subprocess
 
 logger = logging.getLogger(__name__)
 
@@ -272,12 +273,13 @@
 
   # We haven't written the metadata entry, which will be done in
   # FinalizeMetadata.
-  output_zip.close()
+  common.ZipClose(output_zip)
 
   needed_property_files = (
       NonAbOtaPropertyFiles(),
   )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files, package_key=OPTIONS.package_key)
+  FinalizeMetadata(metadata, staging_file, output_file,
+                   needed_property_files, package_key=OPTIONS.package_key)
 
 
 def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
@@ -526,13 +528,14 @@
 
   # We haven't written the metadata entry yet, which will be handled in
   # FinalizeMetadata().
-  output_zip.close()
+  common.ZipClose(output_zip)
 
   # Sign the generated zip package unless no_signing is specified.
   needed_property_files = (
       NonAbOtaPropertyFiles(),
   )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files, package_key=OPTIONS.package_key)
+  FinalizeMetadata(metadata, staging_file, output_file,
+                   needed_property_files, package_key=OPTIONS.package_key)
 
 
 def GenerateNonAbOtaPackage(target_file, output_file, source_file=None):
@@ -555,8 +558,18 @@
   if OPTIONS.extracted_input is not None:
     OPTIONS.input_tmp = OPTIONS.extracted_input
   else:
-    logger.info("unzipping target target-files...")
-    OPTIONS.input_tmp = common.UnzipTemp(target_file, UNZIP_PATTERN)
+    if not os.path.isdir(target_file):
+      logger.info("unzipping target target-files...")
+      OPTIONS.input_tmp = common.UnzipTemp(target_file, UNZIP_PATTERN)
+    else:
+      OPTIONS.input_tmp = target_file
+      tmpfile = common.MakeTempFile(suffix=".zip")
+      os.unlink(tmpfile)
+      common.RunAndCheckOutput(
+          ["zip", tmpfile, "-r", ".", "-0"], cwd=target_file)
+      assert zipfile.is_zipfile(tmpfile)
+      target_file = tmpfile
+
   OPTIONS.target_tmp = OPTIONS.input_tmp
 
   # If the caller explicitly specified the device-specific extensions path via
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 97fbd51..4c0d391 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -248,6 +248,9 @@
 
   --security_patch_level
       Override the security patch level in target files
+
+  --max_threads
+      Specify max number of threads allowed when generating A/B OTA
 """
 
 from __future__ import print_function
@@ -267,10 +270,9 @@
 import common
 import ota_utils
 from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
-                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME)
-from common import IsSparseImage
+                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir)
+from common import DoesInputFileContain, IsSparseImage
 import target_files_diff
-from check_target_files_vintf import CheckVintfIfTrebleEnabled
 from non_ab_ota import GenerateNonAbOtaPackage
 from payload_signer import PayloadSigner
 
@@ -321,6 +323,8 @@
 OPTIONS.enable_lz4diff = False
 OPTIONS.vabc_compression_param = None
 OPTIONS.security_patch_level = None
+OPTIONS.max_threads = None
+
 
 POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
 DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -423,6 +427,13 @@
   slot will be used. This is to ensure that we always have valid boot, vbmeta,
   bootloader images in the inactive slot.
 
+  After writing system_other to inactive slot's system partition,
+  PackageManagerService will read `ro.cp_system_other_odex`, and set
+  `sys.cppreopt` to "requested". Then, according to
+  system/extras/cppreopts/cppreopts.rc , init will mount system_other at
+  /postinstall, and execute `cppreopts` to copy optimized APKs from
+  /postinstall to /data .
+
   Args:
     input_file: The input target-files.zip file.
     skip_postinstall: Whether to skip copying the postinstall config file.
@@ -488,7 +499,7 @@
       else:
         common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
 
-  target_zip.close()
+  common.ZipClose(target_zip)
 
   return target_file
 
@@ -507,20 +518,14 @@
   Returns:
     The filename of target-files.zip that doesn't contain postinstall config.
   """
-  # We should only make a copy if postinstall_config entry exists.
-  with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
-    if POSTINSTALL_CONFIG not in input_zip.namelist():
-      return input_file
-
-  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
-  shutil.copyfile(input_file, target_file)
-  common.ZipDelete(target_file, POSTINSTALL_CONFIG)
-  return target_file
+  config_path = os.path.join(input_file, POSTINSTALL_CONFIG)
+  if os.path.exists(config_path):
+    os.unlink(config_path)
+  return input_file
 
 
 def ParseInfoDict(target_file_path):
-  with zipfile.ZipFile(target_file_path, 'r', allowZip64=True) as zfp:
-    return common.LoadInfoDict(zfp)
+  return common.LoadInfoDict(target_file_path)
 
 
 def GetTargetFilesZipForCustomVABCCompression(input_file, vabc_compression_param):
@@ -532,6 +537,17 @@
   Returns:
     The path to modified target-files.zip
   """
+  if os.path.isdir(input_file):
+    dynamic_partition_info_path = os.path.join(
+        input_file, "META", "dynamic_partitions_info.txt")
+    with open(dynamic_partition_info_path, "r") as fp:
+      dynamic_partition_info = fp.read()
+    dynamic_partition_info = ModifyVABCCompressionParam(
+        dynamic_partition_info, vabc_compression_param)
+    with open(dynamic_partition_info_path, "w") as fp:
+      fp.write(dynamic_partition_info)
+    return input_file
+
   target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
   shutil.copyfile(input_file, target_file)
   common.ZipDelete(target_file, DYNAMIC_PARTITION_INFO)
@@ -559,23 +575,7 @@
     The filename of target-files.zip used for partial ota update.
   """
 
-  def AddImageForPartition(partition_name):
-    """Add the archive name for a given partition to the copy list."""
-    for prefix in ['IMAGES', 'RADIO']:
-      image_path = '{}/{}.img'.format(prefix, partition_name)
-      if image_path in namelist:
-        copy_entries.append(image_path)
-        map_path = '{}/{}.map'.format(prefix, partition_name)
-        if map_path in namelist:
-          copy_entries.append(map_path)
-        return
-
-    raise ValueError("Cannot find {} in input zipfile".format(partition_name))
-
-  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
-    original_ab_partitions = input_zip.read(
-        AB_PARTITIONS).decode().splitlines()
-    namelist = input_zip.namelist()
+  original_ab_partitions = common.ReadFromInputFile(input_file, AB_PARTITIONS)
 
   unrecognized_partitions = [partition for partition in ab_partitions if
                              partition not in original_ab_partitions]
@@ -584,50 +584,65 @@
                      unrecognized_partitions)
 
   logger.info("Generating partial updates for %s", ab_partitions)
+  for subdir in ["IMAGES", "RADIO", "PREBUILT_IMAGES"]:
+    image_dir = os.path.join(subdir)
+    if not os.path.exists(image_dir):
+      continue
+    for filename in os.listdir(image_dir):
+      filepath = os.path.join(image_dir, filename)
+      if filename.endswith(".img"):
+        partition_name = filename.removesuffix(".img")
+        if partition_name not in ab_partitions:
+          os.unlink(filepath)
 
-  copy_entries = ['META/update_engine_config.txt']
-  for partition_name in ab_partitions:
-    AddImageForPartition(partition_name)
+  common.WriteToInputFile(input_file, 'META/ab_partitions.txt',
+                          '\n'.join(ab_partitions))
+  CARE_MAP_ENTRY = "META/care_map.pb"
+  if DoesInputFileContain(input_file, CARE_MAP_ENTRY):
+    caremap = care_map_pb2.CareMap()
+    caremap.ParseFromString(
+        common.ReadBytesFromInputFile(input_file, CARE_MAP_ENTRY))
+    filtered = [
+        part for part in caremap.partitions if part.name in ab_partitions]
+    del caremap.partitions[:]
+    caremap.partitions.extend(filtered)
+    common.WriteBytesToInputFile(input_file, CARE_MAP_ENTRY,
+                                 caremap.SerializeToString())
 
-  # Use zip2zip to avoid extracting the zipfile.
-  partial_target_file = common.MakeTempFile(suffix='.zip')
-  cmd = ['zip2zip', '-i', input_file, '-o', partial_target_file]
-  cmd.extend(['{}:{}'.format(name, name) for name in copy_entries])
-  common.RunAndCheckOutput(cmd)
+  for info_file in ['META/misc_info.txt', DYNAMIC_PARTITION_INFO]:
+    if not DoesInputFileContain(input_file, info_file):
+      logger.warning('Cannot find %s in input zipfile', info_file)
+      continue
 
-  partial_target_zip = zipfile.ZipFile(partial_target_file, 'a',
-                                       allowZip64=True)
-  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
-    common.ZipWriteStr(partial_target_zip, 'META/ab_partitions.txt',
-                       '\n'.join(ab_partitions))
-    CARE_MAP_ENTRY = "META/care_map.pb"
-    if CARE_MAP_ENTRY in input_zip.namelist():
-      caremap = care_map_pb2.CareMap()
-      caremap.ParseFromString(input_zip.read(CARE_MAP_ENTRY))
-      filtered = [
-          part for part in caremap.partitions if part.name in ab_partitions]
-      del caremap.partitions[:]
-      caremap.partitions.extend(filtered)
-      common.ZipWriteStr(partial_target_zip, CARE_MAP_ENTRY,
-                         caremap.SerializeToString())
+    content = common.ReadFromInputFile(input_file, info_file)
+    modified_info = UpdatesInfoForSpecialUpdates(
+        content, lambda p: p in ab_partitions)
+    if OPTIONS.vabc_compression_param and info_file == DYNAMIC_PARTITION_INFO:
+      modified_info = ModifyVABCCompressionParam(
+          modified_info, OPTIONS.vabc_compression_param)
+    common.WriteToInputFile(input_file, info_file, modified_info)
 
-    for info_file in ['META/misc_info.txt', DYNAMIC_PARTITION_INFO]:
-      if info_file not in input_zip.namelist():
-        logger.warning('Cannot find %s in input zipfile', info_file)
-        continue
-      content = input_zip.read(info_file).decode()
-      modified_info = UpdatesInfoForSpecialUpdates(
-          content, lambda p: p in ab_partitions)
-      if OPTIONS.vabc_compression_param and info_file == DYNAMIC_PARTITION_INFO:
-        modified_info = ModifyVABCCompressionParam(
-            modified_info, OPTIONS.vabc_compression_param)
-      common.ZipWriteStr(partial_target_zip, info_file, modified_info)
+  def IsInPartialList(postinstall_line: str):
+    idx = postinstall_line.find("=")
+    if idx < 0:
+      return False
+    key = postinstall_line[:idx]
+    logger.info("%s %s", key, ab_partitions)
+    for part in ab_partitions:
+      if key.endswith("_" + part):
+        return True
+    return False
 
-    # TODO(xunchang) handle META/postinstall_config.txt'
+  postinstall_config = common.ReadFromInputFile(input_file, POSTINSTALL_CONFIG)
+  postinstall_config = [
+      line for line in postinstall_config.splitlines() if IsInPartialList(line)]
+  if postinstall_config:
+    postinstall_config = "\n".join(postinstall_config)
+    common.WriteToInputFile(input_file, POSTINSTALL_CONFIG, postinstall_config)
+  else:
+    os.unlink(os.path.join(input_file, POSTINSTALL_CONFIG))
 
-  partial_target_zip.close()
-
-  return partial_target_file
+  return input_file
 
 
 def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
@@ -652,21 +667,12 @@
   replace = {'OTA/super_{}.img'.format(dev): 'IMAGES/{}.img'.format(dev)
              for dev in super_block_devices}
 
-  target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
-  shutil.copyfile(input_file, target_file)
-
-  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
-    namelist = input_zip.namelist()
-
-  input_tmp = common.UnzipTemp(input_file, RETROFIT_DAP_UNZIP_PATTERN)
-
   # Remove partitions from META/ab_partitions.txt that is in
   # dynamic_partition_list but not in super_block_devices so that
   # brillo_update_payload won't generate update for those logical partitions.
-  ab_partitions_file = os.path.join(input_tmp, *AB_PARTITIONS.split('/'))
-  with open(ab_partitions_file) as f:
-    ab_partitions_lines = f.readlines()
-    ab_partitions = [line.strip() for line in ab_partitions_lines]
+  ab_partitions_lines = common.ReadFromInputFile(
+      input_file, AB_PARTITIONS).split("\n")
+  ab_partitions = [line.strip() for line in ab_partitions_lines]
   # Assert that all super_block_devices are in ab_partitions
   super_device_not_updated = [partition for partition in super_block_devices
                               if partition not in ab_partitions]
@@ -674,15 +680,6 @@
       "{} is in super_block_devices but not in {}".format(
           super_device_not_updated, AB_PARTITIONS)
   # ab_partitions -= (dynamic_partition_list - super_block_devices)
-  new_ab_partitions = common.MakeTempFile(
-      prefix="ab_partitions", suffix=".txt")
-  with open(new_ab_partitions, 'w') as f:
-    for partition in ab_partitions:
-      if (partition in dynamic_partition_list and
-              partition not in super_block_devices):
-        logger.info("Dropping %s from ab_partitions.txt", partition)
-        continue
-      f.write(partition + "\n")
   to_delete = [AB_PARTITIONS]
 
   # Always skip postinstall for a retrofit update.
@@ -695,24 +692,28 @@
   # Remove the existing partition images as well as the map files.
   to_delete += list(replace.values())
   to_delete += ['IMAGES/{}.map'.format(dev) for dev in super_block_devices]
-
-  common.ZipDelete(target_file, to_delete)
-
-  target_zip = zipfile.ZipFile(target_file, 'a', allowZip64=True)
+  for item in to_delete:
+    os.unlink(os.path.join(input_file, item))
 
   # Write super_{foo}.img as {foo}.img.
   for src, dst in replace.items():
-    assert src in namelist, \
+    assert DoesInputFileContain(input_file, src), \
         'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
-    unzipped_file = os.path.join(input_tmp, *src.split('/'))
-    common.ZipWrite(target_zip, unzipped_file, arcname=dst)
+    source_path = os.path.join(input_file, *src.split("/"))
+    target_path = os.path.join(input_file, *dst.split("/"))
+    os.rename(source_path, target_path)
 
   # Write new ab_partitions.txt file
-  common.ZipWrite(target_zip, new_ab_partitions, arcname=AB_PARTITIONS)
+  new_ab_partitions = os.path.join(input_file, AB_PARTITIONS)
+  with open(new_ab_partitions, 'w') as f:
+    for partition in ab_partitions:
+      if (partition in dynamic_partition_list and
+              partition not in super_block_devices):
+        logger.info("Dropping %s from ab_partitions.txt", partition)
+        continue
+      f.write(partition + "\n")
 
-  target_zip.close()
-
-  return target_file
+  return input_file
 
 
 def GetTargetFilesZipForCustomImagesUpdates(input_file, custom_images):
@@ -727,30 +728,34 @@
 
   Returns:
     The filename of a target-files.zip which has renamed the custom images in
-    the IMAGS/ to their partition names.
+    the IMAGES/ to their partition names.
   """
-  # Use zip2zip to avoid extracting the zipfile.
+
+  # First pass: use zip2zip to copy the target files contents, excluding
+  # the "custom" images that will be replaced.
   target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
   cmd = ['zip2zip', '-i', input_file, '-o', target_file]
 
-  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
-    namelist = input_zip.namelist()
-
-  # Write {custom_image}.img as {custom_partition}.img.
+  images = {}
   for custom_partition, custom_image in custom_images.items():
     default_custom_image = '{}.img'.format(custom_partition)
     if default_custom_image != custom_image:
-      logger.info("Update custom partition '%s' with '%s'",
-                  custom_partition, custom_image)
-      # Default custom image need to be deleted first.
-      namelist.remove('IMAGES/{}'.format(default_custom_image))
-      # IMAGES/{custom_image}.img:IMAGES/{custom_partition}.img.
-      cmd.extend(['IMAGES/{}:IMAGES/{}'.format(custom_image,
-                                               default_custom_image)])
+      src = 'IMAGES/' + custom_image
+      dst = 'IMAGES/' + default_custom_image
+      cmd.extend(['-x', dst])
+      images[dst] = src
 
-  cmd.extend(['{}:{}'.format(name, name) for name in namelist])
   common.RunAndCheckOutput(cmd)
 
+  # Second pass: write {custom_image}.img as {custom_partition}.img.
+  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
+    with zipfile.ZipFile(target_file, 'a', allowZip64=True) as output_zip:
+      for dst, src in images.items():
+        data = input_zip.read(src)
+        logger.info("Update custom partition '%s'", dst)
+        common.ZipWriteStr(output_zip, dst, data)
+      output_zip.close()
+
   return target_file
 
 
@@ -817,8 +822,20 @@
   return pattern.search(output) is not None
 
 
+def ExtractOrCopyTargetFiles(target_file):
+  if os.path.isdir(target_file):
+    return CopyTargetFilesDir(target_file)
+  else:
+    return ExtractTargetFiles(target_file)
+
+
 def GenerateAbOtaPackage(target_file, output_file, source_file=None):
   """Generates an Android OTA package that has A/B update payload."""
+  # If input target_files are directories, create a copy so that we can modify
+  # them directly
+  target_file = ExtractOrCopyTargetFiles(target_file)
+  if source_file is not None:
+    source_file = ExtractOrCopyTargetFiles(source_file)
   # Stage the output zip package for package signing.
   if not OPTIONS.no_signing:
     staging_file = common.MakeTempFile(suffix='.zip')
@@ -829,6 +846,7 @@
                                allowZip64=True)
 
   if source_file is not None:
+    source_file = ExtractTargetFiles(source_file)
     assert "ab_partitions" in OPTIONS.source_info_dict, \
         "META/ab_partitions.txt is required for ab_update."
     assert "ab_partitions" in OPTIONS.target_info_dict, \
@@ -893,6 +911,24 @@
           (source_info is not None and not source_info.is_vabc_xor):
     logger.info("VABC XOR Not supported, disabling")
     OPTIONS.enable_vabc_xor = False
+
+  if OPTIONS.vabc_compression_param == "none":
+    logger.info(
+        "VABC Compression algorithm is set to 'none', disabling VABC xor")
+    OPTIONS.enable_vabc_xor = False
+
+  if OPTIONS.enable_vabc_xor:
+    api_level = -1
+    if source_info is not None:
+      api_level = source_info.vendor_api_level
+    if api_level == -1:
+      api_level = target_info.vendor_api_level
+
+    # XOR is only supported on T and higher.
+    if api_level < 33:
+      logger.error("VABC XOR not supported on this vendor, disabling")
+      OPTIONS.enable_vabc_xor = False
+
   additional_args = []
 
   # Prepare custom images.
@@ -907,23 +943,23 @@
   elif OPTIONS.partial:
     target_file = GetTargetFilesZipForPartialUpdates(target_file,
                                                      OPTIONS.partial)
-    additional_args += ["--is_partial_update", "true"]
-  elif OPTIONS.vabc_compression_param:
+  if OPTIONS.vabc_compression_param:
     target_file = GetTargetFilesZipForCustomVABCCompression(
         target_file, OPTIONS.vabc_compression_param)
-  elif OPTIONS.skip_postinstall:
+  if OPTIONS.skip_postinstall:
     target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
   # Target_file may have been modified, reparse ab_partitions
-  with zipfile.ZipFile(target_file, allowZip64=True) as zfp:
-    target_info.info_dict['ab_partitions'] = zfp.read(
-        AB_PARTITIONS).decode().strip().split("\n")
+  target_info.info_dict['ab_partitions'] = common.ReadFromInputFile(target_file,
+                                                                    AB_PARTITIONS).strip().split("\n")
 
+  from check_target_files_vintf import CheckVintfIfTrebleEnabled
   CheckVintfIfTrebleEnabled(target_file, target_info)
 
   # Metadata to comply with Android OTA package format.
   metadata = GetPackageMetadata(target_info, source_info)
   # Generate payload.
-  payload = PayloadGenerator(OPTIONS.include_secondary, OPTIONS.wipe_user_data)
+  payload = PayloadGenerator(
+      wipe_user_data=OPTIONS.wipe_user_data, minor_version=OPTIONS.force_minor_version, is_partial_update=OPTIONS.partial)
 
   partition_timestamps_flags = []
   # Enforce a max timestamp this payload can be applied on top of.
@@ -950,7 +986,10 @@
 
   additional_args += ["--security_patch_level", security_patch_level]
 
-  additional_args += ["--enable_zucchini",
+  if OPTIONS.max_threads:
+    additional_args += ["--max_threads", OPTIONS.max_threads]
+
+  additional_args += ["--enable_zucchini=" +
                       str(OPTIONS.enable_zucchini).lower()]
 
   if not ota_utils.IsLz4diffCompatible(source_file, target_file):
@@ -958,7 +997,7 @@
         "Source build doesn't support lz4diff, or source/target don't have compatible lz4diff versions. Disabling lz4diff.")
     OPTIONS.enable_lz4diff = False
 
-  additional_args += ["--enable_lz4diff",
+  additional_args += ["--enable_lz4diff=" +
                       str(OPTIONS.enable_lz4diff).lower()]
 
   if source_file and OPTIONS.enable_lz4diff:
@@ -974,20 +1013,13 @@
     additional_args += ["--erofs_compression_param", erofs_compression_param]
 
   if OPTIONS.disable_vabc:
-    additional_args += ["--disable_vabc", "true"]
+    additional_args += ["--disable_vabc=true"]
   if OPTIONS.enable_vabc_xor:
-    additional_args += ["--enable_vabc_xor", "true"]
-  if OPTIONS.force_minor_version:
-    additional_args += ["--force_minor_version", OPTIONS.force_minor_version]
+    additional_args += ["--enable_vabc_xor=true"]
   if OPTIONS.compressor_types:
     additional_args += ["--compressor_types", OPTIONS.compressor_types]
   additional_args += ["--max_timestamp", max_timestamp]
 
-  if SupportsMainlineGkiUpdates(source_file):
-    logger.warning(
-        "Detected build with mainline GKI, include full boot image.")
-    additional_args.extend(["--full_boot", "true"])
-
   payload.Generate(
       target_file,
       source_file,
@@ -1020,34 +1052,32 @@
 
   # If dm-verity is supported for the device, copy contents of care_map
   # into A/B OTA package.
-  target_zip = zipfile.ZipFile(target_file, "r", allowZip64=True)
   if target_info.get("avb_enable") == "true":
-    care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
-                     "META/" + x in target_zip.namelist()]
-
     # Adds care_map if either the protobuf format or the plain text one exists.
-    if care_map_list:
-      care_map_name = care_map_list[0]
-      care_map_data = target_zip.read("META/" + care_map_name)
+    for care_map_name in ["care_map.pb", "care_map.txt"]:
+      if not DoesInputFileContain(target_file, "META/" + care_map_name):
+        continue
+      care_map_data = common.ReadBytesFromInputFile(
+          target_file, "META/" + care_map_name)
       # In order to support streaming, care_map needs to be packed as
       # ZIP_STORED.
       common.ZipWriteStr(output_zip, care_map_name, care_map_data,
                          compress_type=zipfile.ZIP_STORED)
+      # break here to avoid going into else when care map has been handled
+      break
     else:
       logger.warning("Cannot find care map file in target_file package")
 
   # Add the source apex version for incremental ota updates, and write the
   # result apex info to the ota package.
-  ota_apex_info = ota_utils.ConstructOtaApexInfo(target_zip, source_file)
+  ota_apex_info = ota_utils.ConstructOtaApexInfo(target_file, source_file)
   if ota_apex_info is not None:
     common.ZipWriteStr(output_zip, "apex_info.pb", ota_apex_info,
                        compress_type=zipfile.ZIP_STORED)
 
-  target_zip.close()
-
   # We haven't written the metadata entry yet, which will be handled in
   # FinalizeMetadata().
-  output_zip.close()
+  common.ZipClose(output_zip)
 
   FinalizeMetadata(metadata, staging_file, output_file,
                    package_key=OPTIONS.package_key)
@@ -1167,6 +1197,12 @@
       OPTIONS.vabc_compression_param = a.lower()
     elif o == "--security_patch_level":
       OPTIONS.security_patch_level = a
+    elif o in ("--max_threads",):
+      if a.isdigit():
+        OPTIONS.max_threads = a
+      else:
+        raise ValueError("Cannot parse value %r for option %r - only "
+                         "integers are allowed." % (a, o))
     else:
       return False
     return True
@@ -1218,6 +1254,7 @@
                                  "enable_lz4diff=",
                                  "vabc_compression_param=",
                                  "security_patch_level=",
+                                 "max_threads=",
                              ], extra_option_handler=option_handler)
   common.InitLogging()
 
@@ -1235,7 +1272,7 @@
   if OPTIONS.extracted_input is not None:
     OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
   else:
-    OPTIONS.info_dict = ParseInfoDict(args[0])
+    OPTIONS.info_dict = common.LoadInfoDict(args[0])
 
   if OPTIONS.wipe_user_data:
     if not OPTIONS.vabc_downgrade:
@@ -1348,7 +1385,8 @@
           "what(even if data wipe is done), so SPL downgrade on any "
           "release-keys build is not allowed.".format(target_spl, source_spl))
 
-    logger.info("SPL downgrade on %s", target_build_prop.GetProp("ro.build.tags"))
+    logger.info("SPL downgrade on %s",
+                target_build_prop.GetProp("ro.build.tags"))
     if is_spl_downgrade and not OPTIONS.spl_downgrade and not OPTIONS.downgrade:
       raise common.ExternalError(
           "Target security patch level {} is older than source SPL {} applying "
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index e2ce31d..466cafb 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -22,10 +22,11 @@
 
 import ota_metadata_pb2
 import common
-from common import (ZipDelete, OPTIONS, MakeTempFile,
+import fnmatch
+from common import (ZipDelete, DoesInputFileContain, ReadBytesFromInputFile, OPTIONS, MakeTempFile,
                     ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
                     SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
-                    GetRamdiskFormat)
+                    GetRamdiskFormat, ParseUpdateEngineConfig)
 from payload_signer import PayloadSigner
 
 
@@ -44,8 +45,10 @@
 
 METADATA_NAME = 'META-INF/com/android/metadata'
 METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'
-UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
+UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*',
+                 'RADIO/*', '*/build.prop', '*/default.prop', '*/build.default', "*/etc/vintf/*"]
 SECURITY_PATCH_LEVEL_PROP_NAME = "ro.build.version.security_patch"
+TARGET_FILES_IMAGES_SUBDIR = ["IMAGES", "PREBUILT_IMAGES", "RADIO"]
 
 
 def FinalizeMetadata(metadata, input_file, output_file, needed_property_files=None, package_key=None, pw=None):
@@ -135,7 +138,8 @@
     logger.info(f"Signing disabled for output file {output_file}")
     shutil.copy(prelim_signing, output_file)
   else:
-    logger.info(f"Signing the output file {output_file} with key {package_key}")
+    logger.info(
+        f"Signing the output file {output_file} with key {package_key}")
     SignOutput(prelim_signing, output_file, package_key, pw)
 
   # Reopen the final signed zip to double check the streaming metadata.
@@ -625,12 +629,10 @@
   """If applicable, add the source version to the apex info."""
 
   def _ReadApexInfo(input_zip):
-    if "META/apex_info.pb" not in input_zip.namelist():
+    if not DoesInputFileContain(input_zip, "META/apex_info.pb"):
       logger.warning("target_file doesn't contain apex_info.pb %s", input_zip)
       return None
-
-    with input_zip.open("META/apex_info.pb", "r") as zfp:
-      return zfp.read()
+    return ReadBytesFromInputFile(input_zip, "META/apex_info.pb")
 
   target_apex_string = _ReadApexInfo(target_zip)
   # Return early if the target apex info doesn't exist or is empty.
@@ -641,8 +643,7 @@
   if not source_file:
     return target_apex_string
 
-  with zipfile.ZipFile(source_file, "r", allowZip64=True) as source_zip:
-    source_apex_string = _ReadApexInfo(source_zip)
+  source_apex_string = _ReadApexInfo(source_file)
   if not source_apex_string:
     return target_apex_string
 
@@ -721,6 +722,54 @@
   return sourceEntry and targetEntry and sourceEntry == targetEntry
 
 
+def ExtractTargetFiles(path: str):
+  if os.path.isdir(path):
+    logger.info("target files %s is already extracted", path)
+    return path
+  extracted_dir = common.MakeTempDir("target_files")
+  common.UnzipToDir(path, extracted_dir, UNZIP_PATTERN + [""])
+  for subdir in TARGET_FILES_IMAGES_SUBDIR:
+    image_dir = os.path.join(extracted_dir, subdir)
+    if not os.path.exists(image_dir):
+      continue
+    for filename in os.listdir(image_dir):
+      if not filename.endswith(".img"):
+        continue
+      common.UnsparseImage(os.path.join(image_dir, filename))
+
+  return extracted_dir
+
+
+def LocatePartitionPath(target_files_dir: str, partition: str, allow_empty):
+  path = os.path.join(target_files_dir, "RADIO", partition + ".img")
+  if os.path.exists(path):
+    return path
+  path = os.path.join(target_files_dir, "IMAGES", partition + ".img")
+  if os.path.exists(path):
+    return path
+  if allow_empty:
+    return ""
+  raise common.ExternalError(
+      "Partition {} not found in target files {}".format(partition, target_files_dir))
+
+
+def GetPartitionImages(target_files_dir: str, ab_partitions, allow_empty=True):
+  assert os.path.isdir(target_files_dir)
+  return ":".join([LocatePartitionPath(target_files_dir, partition, allow_empty) for partition in ab_partitions])
+
+
+def LocatePartitionMap(target_files_dir: str, partition: str):
+  path = os.path.join(target_files_dir, "RADIO", partition + ".map")
+  if os.path.exists(path):
+    return path
+  return ""
+
+
+def GetPartitionMaps(target_files_dir: str, ab_partitions):
+  assert os.path.isdir(target_files_dir)
+  return ":".join([LocatePartitionMap(target_files_dir, partition) for partition in ab_partitions])
+
+
 class PayloadGenerator(object):
   """Manages the creation and the signing of an A/B OTA Payload."""
 
@@ -729,7 +778,7 @@
   SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
   SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
 
-  def __init__(self, secondary=False, wipe_user_data=False):
+  def __init__(self, secondary=False, wipe_user_data=False, minor_version=None, is_partial_update=False):
     """Initializes a Payload instance.
 
     Args:
@@ -739,6 +788,8 @@
     self.payload_properties = None
     self.secondary = secondary
     self.wipe_user_data = wipe_user_data
+    self.minor_version = minor_version
+    self.is_partial_update = is_partial_update
 
   def _Run(self, cmd):  # pylint: disable=no-self-use
     # Don't pipe (buffer) the output if verbose is set. Let
@@ -757,21 +808,61 @@
       source_file: The filename of the source build target-files zip; or None if
           generating a full OTA.
       additional_args: A list of additional args that should be passed to
-          brillo_update_payload script; or None.
+          delta_generator binary; or None.
     """
     if additional_args is None:
       additional_args = []
 
     payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
-    cmd = ["brillo_update_payload", "generate",
-           "--payload", payload_file,
-           "--target_image", target_file]
+    target_dir = ExtractTargetFiles(target_file)
+    cmd = ["delta_generator",
+           "--out_file", payload_file]
+    with open(os.path.join(target_dir, "META", "ab_partitions.txt"), "r") as fp:
+      ab_partitions = fp.read().strip().splitlines()
+    cmd.extend(["--partition_names", ":".join(ab_partitions)])
+    cmd.extend(
+        ["--new_partitions", GetPartitionImages(target_dir, ab_partitions, False)])
+    cmd.extend(
+        ["--new_mapfiles", GetPartitionMaps(target_dir, ab_partitions)])
     if source_file is not None:
-      cmd.extend(["--source_image", source_file])
+      source_dir = ExtractTargetFiles(source_file)
+      cmd.extend(
+          ["--old_partitions", GetPartitionImages(source_dir, ab_partitions, True)])
+      cmd.extend(
+          ["--old_mapfiles", GetPartitionMaps(source_dir, ab_partitions)])
+
       if OPTIONS.disable_fec_computation:
-        cmd.extend(["--disable_fec_computation", "true"])
+        cmd.extend(["--disable_fec_computation=true"])
       if OPTIONS.disable_verity_computation:
-        cmd.extend(["--disable_verity_computation", "true"])
+        cmd.extend(["--disable_verity_computation=true"])
+    postinstall_config = os.path.join(
+        target_dir, "META", "postinstall_config.txt")
+
+    if os.path.exists(postinstall_config):
+      cmd.extend(["--new_postinstall_config_file", postinstall_config])
+    dynamic_partition_info = os.path.join(
+        target_dir, "META", "dynamic_partitions_info.txt")
+
+    if os.path.exists(dynamic_partition_info):
+      cmd.extend(["--dynamic_partition_info_file", dynamic_partition_info])
+
+    apex_info = os.path.join(
+        target_dir, "META", "apex_info.pb")
+    if os.path.exists(apex_info):
+      cmd.extend(["--apex_info_file", apex_info])
+
+    major_version, minor_version = ParseUpdateEngineConfig(
+        os.path.join(target_dir, "META", "update_engine_config.txt"))
+    if source_file:
+      major_version, minor_version = ParseUpdateEngineConfig(
+          os.path.join(source_dir, "META", "update_engine_config.txt"))
+    if self.minor_version:
+      minor_version = self.minor_version
+    cmd.extend(["--major_version", str(major_version)])
+    if source_file is not None or self.is_partial_update:
+      cmd.extend(["--minor_version", str(minor_version)])
+    if self.is_partial_update:
+      cmd.extend(["--is_partial_update=true"])
     cmd.extend(additional_args)
     self._Run(cmd)
 
@@ -963,3 +1054,38 @@
     assert metadata_total <= payload_size
 
     return (payload_offset, metadata_total)
+
+
+def Fnmatch(filename, patterns):
+  return any([fnmatch.fnmatch(filename, pat) for pat in patterns])
+
+
+def CopyTargetFilesDir(input_dir):
+  output_dir = common.MakeTempDir("target_files")
+
+  def SymlinkIfNotSparse(src, dst):
+    if common.IsSparseImage(src):
+      return common.UnsparseImage(src, dst)
+    else:
+      return os.link(src, dst)
+
+  for subdir in TARGET_FILES_IMAGES_SUBDIR:
+    if not os.path.exists(os.path.join(input_dir, subdir)):
+      continue
+    shutil.copytree(os.path.join(input_dir, subdir), os.path.join(
+        output_dir, subdir), dirs_exist_ok=True, copy_function=SymlinkIfNotSparse)
+  shutil.copytree(os.path.join(input_dir, "META"), os.path.join(
+      output_dir, "META"), dirs_exist_ok=True)
+
+  for (dirpath, _, filenames) in os.walk(input_dir):
+    for filename in filenames:
+      path = os.path.join(dirpath, filename)
+      relative_path = path.removeprefix(input_dir).removeprefix("/")
+      if not Fnmatch(relative_path, UNZIP_PATTERN):
+        continue
+      if filename.endswith(".prop") or filename == "prop.default" or "/etc/vintf/" in relative_path:
+        target_path = os.path.join(
+            output_dir, relative_path)
+        os.makedirs(os.path.dirname(target_path), exist_ok=True)
+        shutil.copy(path, target_path)
+  return output_dir
diff --git a/tools/releasetools/payload_signer.py b/tools/releasetools/payload_signer.py
index 4f342ac..9933aef 100644
--- a/tools/releasetools/payload_signer.py
+++ b/tools/releasetools/payload_signer.py
@@ -36,11 +36,16 @@
   (OPTIONS.package_key) and calls openssl for the signing works.
   """
 
-  def __init__(self, package_key=None, private_key_suffix=None, pw=None, payload_signer=None):
+  def __init__(self, package_key=None, private_key_suffix=None, pw=None, payload_signer=None,
+               payload_signer_args=None, payload_signer_maximum_signature_size=None):
     if package_key is None:
       package_key = OPTIONS.package_key
     if private_key_suffix is None:
       private_key_suffix = OPTIONS.private_key_suffix
+    if payload_signer_args is None:
+      payload_signer_args = OPTIONS.payload_signer_args
+    if payload_signer_maximum_signature_size is None:
+      payload_signer_maximum_signature_size = OPTIONS.payload_signer_maximum_signature_size
 
     if payload_signer is None:
       # Prepare the payload signing key.
@@ -59,10 +64,10 @@
           signing_key)
     else:
       self.signer = payload_signer
-      self.signer_args = OPTIONS.payload_signer_args
-      if OPTIONS.payload_signer_maximum_signature_size:
+      self.signer_args = payload_signer_args
+      if payload_signer_maximum_signature_size:
         self.maximum_signature_size = int(
-            OPTIONS.payload_signer_maximum_signature_size)
+            payload_signer_maximum_signature_size)
       else:
         # The legacy config uses RSA2048 keys.
         logger.warning("The maximum signature size for payload signer is not"
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 4a12e74..8291448 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -908,7 +908,7 @@
   certs_zip = zipfile.ZipFile(temp_file, "w", allowZip64=True)
   for k in keys:
     common.ZipWrite(certs_zip, k)
-  certs_zip.close()
+  common.ZipClose(certs_zip)
   common.ZipWriteStr(output_zip, filename, temp_file.getvalue())
 
 
@@ -1545,8 +1545,8 @@
                      platform_api_level, codename_to_api_level_map,
                      compressed_extension)
 
-  input_zip.close()
-  output_zip.close()
+  common.ZipClose(input_zip)
+  common.ZipClose(output_zip)
 
   if OPTIONS.vendor_partitions and OPTIONS.vendor_otatools:
     BuildVendorPartitions(args[1])
diff --git a/tools/releasetools/test_check_target_files_vintf.py b/tools/releasetools/test_check_target_files_vintf.py
index 8725dd6..7c154d7 100644
--- a/tools/releasetools/test_check_target_files_vintf.py
+++ b/tools/releasetools/test_check_target_files_vintf.py
@@ -15,6 +15,7 @@
 #
 
 import os.path
+import shutil
 
 import common
 import test_utils
@@ -86,6 +87,28 @@
 
     return test_dir
 
+  # Prepare test dir with required HAL for APEX testing
+  def prepare_apex_test_dir(self, test_delta_rel_path):
+    test_dir = self.prepare_test_dir(test_delta_rel_path)
+    write_string_to_file(
+        """<compatibility-matrix version="1.0" level="1" type="framework">
+            <hal format="aidl" optional="false" updatable-via-apex="true">
+                <name>android.apex.foo</name>
+                <version>1</version>
+                <interface>
+                    <name>IApex</name>
+                    <instance>default</instance>
+                </interface>
+            </hal>
+            <sepolicy>
+                <sepolicy-version>0.0</sepolicy-version>
+                <kernel-sepolicy-version>0</kernel-sepolicy-version>
+            </sepolicy>
+        </compatibility-matrix>""",
+        os.path.join(test_dir, 'SYSTEM/etc/vintf/compatibility_matrix.1.xml'))
+
+    return test_dir
+
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_CheckVintf_skeleton(self):
     msg = 'vintf check with skeleton target files failed.'
@@ -143,3 +166,25 @@
                          os.path.join(test_dir, 'VENDOR/etc/vintf/manifest.xml'))
     # Should raise an error because a file has invalid format.
     self.assertRaises(common.ExternalError, CheckVintf, test_dir)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_CheckVintf_apex_compat(self):
+    apex_file_name = 'com.android.apex.vendor.foo.with_vintf.apex'
+    msg = 'vintf/apex_compat should be compatible because ' \
+          'APEX %s has the required HALs' % (apex_file_name)
+    test_dir = self.prepare_apex_test_dir('vintf/apex_compat')
+    # Copy APEX under VENDOR/apex
+    apex_file = os.path.join(test_utils.get_current_dir(), apex_file_name)
+    apex_dir = os.path.join(test_dir, 'VENDOR/apex')
+    os.makedirs(apex_dir)
+    shutil.copy(apex_file, apex_dir)
+    # Should find required HAL via APEX
+    self.assertTrue(CheckVintf(test_dir), msg=msg)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_CheckVintf_apex_incompat(self):
+    msg = 'vintf/apex_incompat should be incompatible because ' \
+          'no APEX data'
+    test_dir = self.prepare_apex_test_dir('vintf/apex_incompat')
+    # Should not find required HAL
+    self.assertFalse(CheckVintf(test_dir), msg=msg)
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 8c9655ad0..2dfd8c7 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -222,17 +222,17 @@
     info_dict = copy.deepcopy(self.TEST_INFO_FINGERPRINT_DICT)
     build_info = common.BuildInfo(info_dict)
     self.assertEqual(
-        'product-brand/product-name/product-device:version-release/build-id/'
-        'version-incremental:build-type/build-tags', build_info.fingerprint)
+      'product-brand/product-name/product-device:version-release/build-id/'
+      'version-incremental:build-type/build-tags', build_info.fingerprint)
 
     build_props = info_dict['build.prop'].build_props
     del build_props['ro.build.id']
     build_props['ro.build.legacy.id'] = 'legacy-build-id'
     build_info = common.BuildInfo(info_dict, use_legacy_id=True)
     self.assertEqual(
-        'product-brand/product-name/product-device:version-release/'
-        'legacy-build-id/version-incremental:build-type/build-tags',
-        build_info.fingerprint)
+      'product-brand/product-name/product-device:version-release/'
+      'legacy-build-id/version-incremental:build-type/build-tags',
+      build_info.fingerprint)
 
     self.assertRaises(common.ExternalError, common.BuildInfo, info_dict, None,
                       False)
@@ -241,9 +241,9 @@
     info_dict['vbmeta_digest'] = 'abcde12345'
     build_info = common.BuildInfo(info_dict, use_legacy_id=False)
     self.assertEqual(
-        'product-brand/product-name/product-device:version-release/'
-        'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
-        build_info.fingerprint)
+      'product-brand/product-name/product-device:version-release/'
+      'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
+      build_info.fingerprint)
 
   def test___getitem__(self):
     target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
@@ -376,7 +376,7 @@
     info_dict['build.prop'].build_props[
         'ro.product.property_source_order'] = 'bad-source'
     with self.assertRaisesRegexp(common.ExternalError,
-                                 'Invalid ro.product.property_source_order'):
+        'Invalid ro.product.property_source_order'):
       info = common.BuildInfo(info_dict, None)
       info.GetBuildProp('ro.product.device')
 
@@ -452,14 +452,16 @@
         test_file.write(bytes(data))
       test_file.close()
 
-      expected_stat = os.stat(test_file_name)
       expected_mode = extra_zipwrite_args.get("perms", 0o644)
       expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                        zipfile.ZIP_STORED)
-      time.sleep(5)  # Make sure the atime/mtime will change measurably.
 
+      # Arbitrary timestamp, just to make sure common.ZipWrite() restores
+      # the timestamp after writing.
+      os.utime(test_file_name, (1234567, 1234567))
+      expected_stat = os.stat(test_file_name)
       common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
-      zip_file.close()
+      common.ZipClose(zip_file)
 
       self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
                    test_file_name, expected_stat, expected_mode,
@@ -480,8 +482,6 @@
     try:
       expected_compress_type = extra_args.get("compress_type",
                                               zipfile.ZIP_STORED)
-      time.sleep(5)  # Make sure the atime/mtime will change measurably.
-
       if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
         arcname = zinfo_or_arcname
         expected_mode = extra_args.get("perms", 0o644)
@@ -494,7 +494,7 @@
         expected_mode = extra_args.get("perms", zinfo_perms)
 
       common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
-      zip_file.close()
+      common.ZipClose(zip_file)
 
       self._verify(zip_file, zip_file_name, arcname, sha1(contents).hexdigest(),
                    expected_mode=expected_mode,
@@ -528,15 +528,17 @@
         test_file.write(data)
       test_file.close()
 
+      # Arbitrary timestamp, just to make sure common.ZipWrite() restores
+      # the timestamp after writing.
+      os.utime(test_file_name, (1234567, 1234567))
       expected_stat = os.stat(test_file_name)
       expected_mode = 0o644
       expected_compress_type = extra_args.get("compress_type",
                                               zipfile.ZIP_STORED)
-      time.sleep(5)  # Make sure the atime/mtime will change measurably.
 
       common.ZipWrite(zip_file, test_file_name, **extra_args)
       common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
-      zip_file.close()
+      common.ZipClose(zip_file)
 
       # Verify the contents written by ZipWrite().
       self._verify(zip_file, zip_file_name, arcname_large,
@@ -551,6 +553,12 @@
       os.remove(zip_file_name)
       os.remove(test_file_name)
 
+  def _test_reset_ZIP64_LIMIT(self, func, *args):
+    default_limit = (1 << 31) - 1
+    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
+    func(*args)
+    self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
+
   def test_ZipWrite(self):
     file_contents = os.urandom(1024)
     self._test_ZipWrite(file_contents)
@@ -575,7 +583,7 @@
     })
 
   def test_ZipWrite_resets_ZIP64_LIMIT(self):
-    self._test_ZipWrite("")
+    self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")
 
   def test_ZipWriteStr(self):
     random_string = os.urandom(1024)
@@ -626,9 +634,9 @@
     })
 
   def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
-    self._test_ZipWriteStr('foo', b'')
+    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
     zinfo = zipfile.ZipInfo(filename="foo")
-    self._test_ZipWriteStr(zinfo, b'')
+    self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, b'')
 
   def test_bug21309935(self):
     zip_file = tempfile.NamedTemporaryFile(delete=False)
@@ -650,7 +658,7 @@
       zinfo = zipfile.ZipInfo(filename="qux")
       zinfo.external_attr = 0o700 << 16
       common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
-      zip_file.close()
+      common.ZipClose(zip_file)
 
       self._verify(zip_file, zip_file_name, "foo",
                    sha1(random_string).hexdigest(),
@@ -677,7 +685,7 @@
       common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
       common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
       common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
-      output_zip.close()
+      common.ZipClose(output_zip)
     zip_file.close()
 
     try:
@@ -725,8 +733,8 @@
       common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
       common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
       common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
-      output_zip.close()
-    output_zip.close()
+      common.ZipClose(output_zip)
+    common.ZipClose(output_zip)
     return zip_file
 
   @test_utils.SkipIfExternalToolsUnavailable()
@@ -813,9 +821,9 @@
   )
 
   APKCERTS_CERTMAP1 = {
-      'RecoveryLocalizer.apk': 'certs/devkey',
-      'Settings.apk': 'build/make/target/product/security/platform',
-      'TV.apk': 'PRESIGNED',
+      'RecoveryLocalizer.apk' : 'certs/devkey',
+      'Settings.apk' : 'build/make/target/product/security/platform',
+      'TV.apk' : 'PRESIGNED',
   }
 
   APKCERTS_TXT2 = (
@@ -830,10 +838,10 @@
   )
 
   APKCERTS_CERTMAP2 = {
-      'Compressed1.apk': 'certs/compressed1',
-      'Compressed2a.apk': 'certs/compressed2',
-      'Compressed2b.apk': 'certs/compressed2',
-      'Compressed3.apk': 'certs/compressed3',
+      'Compressed1.apk' : 'certs/compressed1',
+      'Compressed2a.apk' : 'certs/compressed2',
+      'Compressed2b.apk' : 'certs/compressed2',
+      'Compressed3.apk' : 'certs/compressed3',
   }
 
   APKCERTS_TXT3 = (
@@ -842,7 +850,7 @@
   )
 
   APKCERTS_CERTMAP3 = {
-      'Compressed4.apk': 'certs/compressed4',
+      'Compressed4.apk' : 'certs/compressed4',
   }
 
   # Test parsing with no optional fields, both optional fields, and only the
@@ -859,9 +867,9 @@
   )
 
   APKCERTS_CERTMAP4 = {
-      'RecoveryLocalizer.apk': 'certs/devkey',
-      'Settings.apk': 'build/make/target/product/security/platform',
-      'TV.apk': 'PRESIGNED',
+      'RecoveryLocalizer.apk' : 'certs/devkey',
+      'Settings.apk' : 'build/make/target/product/security/platform',
+      'TV.apk' : 'PRESIGNED',
   }
 
   def setUp(self):
@@ -965,7 +973,7 @@
     extracted_from_privkey = common.ExtractAvbPublicKey('avbtool', privkey)
     extracted_from_pubkey = common.ExtractAvbPublicKey('avbtool', pubkey)
     with open(extracted_from_privkey, 'rb') as privkey_fp, \
-            open(extracted_from_pubkey, 'rb') as pubkey_fp:
+        open(extracted_from_pubkey, 'rb') as pubkey_fp:
       self.assertEqual(privkey_fp.read(), pubkey_fp.read())
 
   def test_ParseCertificate(self):
@@ -1229,8 +1237,7 @@
     self.assertEqual(
         '1-5 9-10',
         sparse_image.file_map['//system/file1'].extra['text_str'])
-    self.assertTrue(
-        sparse_image.file_map['//system/file2'].extra['incomplete'])
+    self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
     self.assertTrue(
         sparse_image.file_map['/system/app/file3'].extra['incomplete'])
 
@@ -1338,7 +1345,7 @@
       'recovery_api_version': 3,
       'fstab_version': 2,
       'system_root_image': 'true',
-      'no_recovery': 'true',
+      'no_recovery' : 'true',
       'recovery_as_boot': 'true',
   }
 
@@ -1659,7 +1666,6 @@
     self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
                       test_file.name, 'generic_kernel')
 
-
 class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
   """Checks the format of install-recovery.sh.
 
@@ -1669,7 +1675,7 @@
   def setUp(self):
     self._tempdir = common.MakeTempDir()
     # Create a fake dict that contains the fstab info for boot&recovery.
-    self._info = {"fstab": {}}
+    self._info = {"fstab" : {}}
     fake_fstab = [
         "/dev/soc.0/by-name/boot /boot emmc defaults defaults",
         "/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
@@ -2016,11 +2022,11 @@
           input_zip, 'odm', placeholder_values)
 
     self.assertEqual({
-        'ro.odm.build.date.utc': '1578430045',
-        'ro.odm.build.fingerprint':
-        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
-        'ro.product.odm.device': 'coral',
-        'ro.product.odm.name': 'product1',
+      'ro.odm.build.date.utc': '1578430045',
+      'ro.odm.build.fingerprint':
+      'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+      'ro.product.odm.device': 'coral',
+      'ro.product.odm.name': 'product1',
     }, partition_props.build_props)
 
     with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
@@ -2203,8 +2209,8 @@
 
     copied_props = copy.deepcopy(partition_props)
     self.assertEqual({
-        'ro.odm.build.date.utc': '1578430045',
-        'ro.odm.build.fingerprint':
-        'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
-        'ro.product.odm.device': 'coral',
+      'ro.odm.build.date.utc': '1578430045',
+      'ro.odm.build.fingerprint':
+      'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+      'ro.product.odm.device': 'coral',
     }, copied_props.build_props)
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index 755241d..7caeed4 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -31,6 +31,7 @@
 import common
 import sparse_img
 from rangelib import RangeSet
+from hashlib import sha256
 
 logger = logging.getLogger(__name__)
 
@@ -42,6 +43,7 @@
 MAX_VBMETA_SIZE = 64 * 1024
 MAX_FOOTER_SIZE = 4096
 
+
 class BuildVerityImageError(Exception):
   """An Exception raised during verity image building."""
 
@@ -64,6 +66,11 @@
   # partition_size could be None at this point, if using dynamic partitions.
   if partition_size:
     partition_size = int(partition_size)
+  # Set up the salt (based on fingerprint) that will be used when adding AVB
+  # hash / hashtree footers.
+  salt = prop_dict.get("avb_salt")
+  if salt is None:
+    salt = sha256(prop_dict.get("fingerprint", "").encode()).hexdigest()
 
   # Verified Boot 2.0
   if (prop_dict.get("avb_hash_enable") == "true" or
@@ -81,7 +88,7 @@
           prop_dict["avb_avbtool"],
           key_path,
           algorithm,
-          prop_dict.get("avb_salt"),
+          salt,
           prop_dict["avb_add_hash_footer_args"])
 
     # Image uses hashtree footer.
@@ -92,7 +99,7 @@
         prop_dict["avb_avbtool"],
         key_path,
         algorithm,
-        prop_dict.get("avb_salt"),
+        salt,
         prop_dict["avb_add_hashtree_footer_args"])
 
   return None
@@ -141,11 +148,7 @@
     self.footer_type = footer_type
     self.avbtool = avbtool
     self.algorithm = algorithm
-    self.key_path = key_path
-    if key_path and not os.path.exists(key_path) and OPTIONS.search_path:
-      new_key_path = os.path.join(OPTIONS.search_path, key_path)
-      if os.path.exists(new_key_path):
-        self.key_path = new_key_path
+    self.key_path = common.ResolveAVBSigningPathArgs(key_path)
 
     self.salt = salt
     self.signing_args = signing_args
@@ -283,7 +286,7 @@
 
 
 def CreateCustomImageBuilder(info_dict, partition_name, partition_size,
-                            key_path, algorithm, signing_args):
+                             key_path, algorithm, signing_args):
   builder = None
   if info_dict.get("avb_enable") == "true":
     builder = VerifiedBootVersion2VerityImageBuilder(
diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp
new file mode 100644
index 0000000..4837dde
--- /dev/null
+++ b/tools/sbom/Android.bp
@@ -0,0 +1,57 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_binary_host {
+    name: "generate-sbom",
+    srcs: [
+        "generate-sbom.py",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    libs: [
+        "metadata_file_proto_py",
+        "libprotobuf-python",
+        "sbom_lib",
+    ],
+}
+
+python_library_host {
+    name: "sbom_lib",
+    srcs: [
+        "sbom_data.py",
+        "sbom_writers.py",
+    ],
+}
+
+python_test_host {
+    name: "sbom_writers_test",
+    main: "sbom_writers_test.py",
+    srcs: [
+        "sbom_writers_test.py",
+    ],
+    data: [
+        "testdata/*",
+    ],
+    libs: [
+        "sbom_lib",
+    ],
+    test_suites: ["general-tests"],
+}
diff --git a/tools/sbom/generate-sbom.py b/tools/sbom/generate-sbom.py
new file mode 100755
index 0000000..2415f7e
--- /dev/null
+++ b/tools/sbom/generate-sbom.py
@@ -0,0 +1,574 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generate the SBOM of the current target product in SPDX format.
+Usage example:
+  generate-sbom.py --output_file out/target/product/vsoc_x86_64/sbom.spdx \
+                   --metadata out/target/product/vsoc_x86_64/sbom-metadata.csv \
+                   --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
+                   --product_mfr=Google
+"""
+
+import argparse
+import csv
+import datetime
+import google.protobuf.text_format as text_format
+import hashlib
+import os
+import metadata_file_pb2
+import sbom_data
+import sbom_writers
+
+
+# Package type
+PKG_SOURCE = 'SOURCE'
+PKG_UPSTREAM = 'UPSTREAM'
+PKG_PREBUILT = 'PREBUILT'
+
+# Security tag
+NVD_CPE23 = 'NVD-CPE2.3:'
+
+# Report
+ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
+ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
+ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
+ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
+ISSUE_INSTALLED_FILE_NOT_EXIST = 'Non-existent installed files:'
+INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
+
+SOONG_PREBUILT_MODULE_TYPES = [
+  'android_app_import',
+  'android_library_import',
+  'cc_prebuilt_binary',
+  'cc_prebuilt_library',
+  'cc_prebuilt_library_headers',
+  'cc_prebuilt_library_shared',
+  'cc_prebuilt_library_static',
+  'cc_prebuilt_object',
+  'dex_import',
+  'java_import',
+  'java_sdk_library_import',
+  'java_system_modules_import',
+  'libclang_rt_prebuilt_library_static',
+  'libclang_rt_prebuilt_library_shared',
+  'llvm_prebuilt_library_static',
+  'ndk_prebuilt_object',
+  'ndk_prebuilt_shared_stl',
+  'ndk_prebuilt_static_stl',
+  'prebuilt_apex',
+  'prebuilt_bootclasspath_fragment',
+  'prebuilt_dsp',
+  'prebuilt_firmware',
+  'prebuilt_kernel_modules',
+  'prebuilt_rfsa',
+  'prebuilt_root',
+  'rust_prebuilt_dylib',
+  'rust_prebuilt_library',
+  'rust_prebuilt_rlib',
+  'vndk_prebuilt_shared',
+]
+
+
+def get_args():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
+  parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
+  parser.add_argument('--metadata', required=True, help='The SBOM metadata file path.')
+  parser.add_argument('--build_version', required=True, help='The build version.')
+  parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
+  parser.add_argument('--json', action='store_true', default=False, help='Generated SBOM file in SPDX JSON format')
+  parser.add_argument('--unbundled_apk', action='store_true', default=False, help='Generate SBOM for unbundled APKs')
+  parser.add_argument('--unbundled_apex', action='store_true', default=False, help='Generate SBOM for unbundled APEXs')
+
+  return parser.parse_args()
+
+
+def log(*info):
+  if args.verbose:
+    for i in info:
+      print(i)
+
+
+def encode_for_spdxid(s):
+  """Simple encode for string values used in SPDXID which uses the charset of A-Za-z0-9.-"""
+  result = ''
+  for c in s:
+    if c.isalnum() or c in '.-':
+      result += c
+    elif c in '_@/':
+      result += '-'
+    else:
+      result += '0x' + c.encode('utf-8').hex()
+
+  return result.lstrip('-')
+
+
+def new_package_id(package_name, type):
+  return f'SPDXRef-{type}-{encode_for_spdxid(package_name)}'
+
+
+def new_file_id(file_path):
+  return f'SPDXRef-{encode_for_spdxid(file_path)}'
+
+
+def checksum(file_path):
+  h = hashlib.sha1()
+  if os.path.islink(file_path):
+    h.update(os.readlink(file_path).encode('utf-8'))
+  else:
+    with open(file_path, 'rb') as f:
+      h.update(f.read())
+  return f'SHA1: {h.hexdigest()}'
+
+
+def is_soong_prebuilt_module(file_metadata):
+  return (file_metadata['soong_module_type'] and
+          file_metadata['soong_module_type'] in SOONG_PREBUILT_MODULE_TYPES)
+
+
+def is_source_package(file_metadata):
+  module_path = file_metadata['module_path']
+  return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)
+
+
+def is_prebuilt_package(file_metadata):
+  module_path = file_metadata['module_path']
+  if module_path:
+    return (module_path.startswith('prebuilts/') or
+            is_soong_prebuilt_module(file_metadata) or
+            file_metadata['is_prebuilt_make_module'])
+
+  kernel_module_copy_files = file_metadata['kernel_module_copy_files']
+  if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
+    return True
+
+  return False
+
+
+def get_source_package_info(file_metadata, metadata_file_path):
+  """Return source package info exists in its METADATA file, currently including name, security tag
+  and external SBOM reference.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  if not metadata_file_path:
+    return file_metadata['module_path'], []
+
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  external_refs = []
+  for tag in metadata_proto.third_party.security.tag:
+    if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
+      external_refs.append(
+        sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+                                     type=sbom_data.PackageExternalRefType.cpe23Type,
+                                     locator=tag.removeprefix(NVD_CPE23)))
+    elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
+      external_refs.append(
+        sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+                                     type=sbom_data.PackageExternalRefType.cpe22Type,
+                                     locator=tag.removeprefix(NVD_CPE23)))
+
+  if metadata_proto.name:
+    return metadata_proto.name, external_refs
+  else:
+    return os.path.basename(metadata_file_path), external_refs  # return the directory name only as package name
+
+
+def get_prebuilt_package_name(file_metadata, metadata_file_path):
+  """Return name of a prebuilt package, which can be from the METADATA file, metadata file path,
+  module path or kernel module's source path if the installed file is a kernel module.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  name = None
+  if metadata_file_path:
+    metadata_proto = metadata_file_protos[metadata_file_path]
+    if metadata_proto.name:
+      name = metadata_proto.name
+    else:
+      name = metadata_file_path
+  elif file_metadata['module_path']:
+    name = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
+    name = os.path.dirname(src_path)
+
+  return name.removeprefix('prebuilts/').replace('/', '-')
+
+
+def get_metadata_file_path(file_metadata):
+  """Search for METADATA file of a package and return its path."""
+  metadata_path = ''
+  if file_metadata['module_path']:
+    metadata_path = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])
+
+  while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
+    metadata_path = os.path.dirname(metadata_path)
+
+  return metadata_path
+
+
+def get_package_version(metadata_file_path):
+  """Return a package's version in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  return metadata_proto.third_party.version
+
+
+def get_package_homepage(metadata_file_path):
+  """Return a package's homepage URL in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.homepage:
+    return metadata_proto.third_party.homepage
+  for url in metadata_proto.third_party.url:
+    if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
+      return url.value
+
+  return None
+
+
+def get_package_download_location(metadata_file_path):
+  """Return a package's code repository URL in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.url:
+    urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
+    if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
+      return urls[0].value
+    elif len(urls) > 1:
+      return urls[1].value
+
+  return None
+
+
+def get_sbom_fragments(installed_file_metadata, metadata_file_path):
+  """Return SPDX fragment of source/prebuilt packages, which usually contains a SOURCE/PREBUILT
+  package, a UPSTREAM package and an external SBOM document reference if sbom_ref defined in its
+  METADATA file.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  external_doc_ref = None
+  packages = []
+  relationships = []
+
+  # Info from METADATA file
+  homepage = get_package_homepage(metadata_file_path)
+  version = get_package_version(metadata_file_path)
+  download_location = get_package_download_location(metadata_file_path)
+
+  if is_source_package(installed_file_metadata):
+    # Source fork packages
+    name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
+    source_package_id = new_package_id(name, PKG_SOURCE)
+    source_package = sbom_data.Package(id=source_package_id, name=name, version=args.build_version,
+                                       download_location=sbom_data.VALUE_NONE,
+                                       supplier='Organization: ' + args.product_mfr,
+                                       external_refs=external_refs)
+
+    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+    upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
+                                         supplier=('Organization: ' + homepage) if homepage else sbom_data.VALUE_NOASSERTION,
+                                         download_location=download_location)
+    packages += [source_package, upstream_package]
+    relationships.append(sbom_data.Relationship(id1=source_package_id,
+                                                relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                id2=upstream_package_id))
+  elif is_prebuilt_package(installed_file_metadata):
+    # Prebuilt fork packages
+    name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
+    prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
+    prebuilt_package = sbom_data.Package(id=prebuilt_package_id,
+                                         name=name,
+                                         download_location=sbom_data.VALUE_NONE,
+                                         version=version if version else args.build_version,
+                                         supplier='Organization: ' + args.product_mfr)
+
+    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+    upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version = version,
+                                         supplier=('Organization: ' + homepage) if homepage else sbom_data.VALUE_NOASSERTION,
+                                         download_location=download_location)
+    packages += [prebuilt_package, upstream_package]
+    relationships.append(sbom_data.Relationship(id1=prebuilt_package_id,
+                                                relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                id2=upstream_package_id))
+
+  if metadata_file_path:
+    metadata_proto = metadata_file_protos[metadata_file_path]
+    if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
+      sbom_url = metadata_proto.third_party.sbom_ref.url
+      sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
+      upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
+      if sbom_url and sbom_checksum and upstream_element_id:
+        doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{encode_for_spdxid(name)}'
+        external_doc_ref = sbom_data.DocumentExternalReference(id=doc_ref_id,
+                                                               uri=sbom_url,
+                                                               checksum=sbom_checksum)
+        relationships.append(
+          sbom_data.Relationship(id1=upstream_package_id,
+                                 relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                 id2=doc_ref_id + ':' + upstream_element_id))
+
+  return external_doc_ref, packages, relationships
+
+
+def generate_package_verification_code(files):
+  checksums = [file.checksum for file in files]
+  checksums.sort()
+  h = hashlib.sha1()
+  h.update(''.join(checksums).encode(encoding='utf-8'))
+  return h.hexdigest()
+
+
+def save_report(report_file_path, report):
+  with open(report_file_path, 'w', encoding='utf-8') as report_file:
+    for type, issues in report.items():
+      report_file.write(type + '\n')
+      for issue in issues:
+        report_file.write('\t' + issue + '\n')
+      report_file.write('\n')
+
+
+# Validate the metadata generated by Make for installed files and report if there is no metadata.
+def installed_file_has_metadata(installed_file_metadata, report):
+  installed_file = installed_file_metadata['installed_file']
+  module_path = installed_file_metadata['module_path']
+  product_copy_files = installed_file_metadata['product_copy_files']
+  kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+  is_platform_generated = installed_file_metadata['is_platform_generated']
+
+  if (not module_path and
+      not product_copy_files and
+      not kernel_module_copy_files and
+      not is_platform_generated and
+      not installed_file.endswith('.fsv_meta')):
+    report[ISSUE_NO_METADATA].append(installed_file)
+    return False
+
+  return True
+
+
+def report_metadata_file(metadata_file_path, installed_file_metadata, report):
+  if metadata_file_path:
+    report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
+        'installed_file: {}, module_path: {}, METADATA file: {}'.format(
+            installed_file_metadata['installed_file'],
+            installed_file_metadata['module_path'],
+            metadata_file_path + '/METADATA'))
+
+    package_metadata = metadata_file_pb2.Metadata()
+    with open(metadata_file_path + '/METADATA', 'rt') as f:
+      text_format.Parse(f.read(), package_metadata)
+
+    if not metadata_file_path in metadata_file_protos:
+      metadata_file_protos[metadata_file_path] = package_metadata
+      if not package_metadata.name:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(f'{metadata_file_path}/METADATA does not have "name"')
+
+      if not package_metadata.third_party.version:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(
+            f'{metadata_file_path}/METADATA does not have "third_party.version"')
+
+      for tag in package_metadata.third_party.security.tag:
+        if not tag.startswith(NVD_CPE23):
+          report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
+              f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
+  else:
+    report[ISSUE_NO_METADATA_FILE].append(
+        "installed_file: {}, module_path: {}".format(
+            installed_file_metadata['installed_file'], installed_file_metadata['module_path']))
+
+
+def generate_sbom_for_unbundled_apk():
+  with open(args.metadata, newline='') as sbom_metadata_file:
+    reader = csv.DictReader(sbom_metadata_file)
+    doc = sbom_data.Document(name=args.build_version,
+                             namespace=f'https://www.google.com/sbom/spdx/android/{args.build_version}',
+                             creators=['Organization: ' + args.product_mfr])
+    for installed_file_metadata in reader:
+      installed_file = installed_file_metadata['installed_file']
+      if args.output_file != installed_file_metadata['build_output_path'] + '.spdx.json':
+        continue
+
+      module_path = installed_file_metadata['module_path']
+      package_id = new_package_id(module_path, PKG_PREBUILT)
+      package = sbom_data.Package(id=package_id,
+                                  name=module_path,
+                                  version=args.build_version,
+                                  supplier='Organization: ' + args.product_mfr)
+      file_id = new_file_id(installed_file)
+      file = sbom_data.File(id=file_id,
+                            name=installed_file,
+                            checksum=checksum(installed_file_metadata['build_output_path']))
+      relationship = sbom_data.Relationship(id1=file_id,
+                                            relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                            id2=package_id)
+      doc.add_package(package)
+      doc.files.append(file)
+      doc.describes = file_id
+      doc.add_relationship(relationship)
+      doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+      break
+
+  with open(args.output_file, 'w', encoding='utf-8') as file:
+    sbom_writers.JSONWriter.write(doc, file)
+  fragment_file = args.output_file.removesuffix('.spdx.json') + '-fragment.spdx'
+  with open(fragment_file, 'w', encoding='utf-8') as file:
+    sbom_writers.TagValueWriter.write(doc, file, fragment=True)
+
+
+def main():
+  global args
+  args = get_args()
+  log('Args:', vars(args))
+
+  if args.unbundled_apk:
+    generate_sbom_for_unbundled_apk()
+    return
+
+  global metadata_file_protos
+  metadata_file_protos = {}
+
+  product_package = sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
+                                      name=sbom_data.PACKAGE_NAME_PRODUCT,
+                                      download_location=sbom_data.VALUE_NONE,
+                                      version=args.build_version,
+                                      supplier='Organization: ' + args.product_mfr,
+                                      files_analyzed=True)
+
+  doc = sbom_data.Document(name=args.build_version,
+                           namespace=f'https://www.google.com/sbom/spdx/android/{args.build_version}',
+                           creators=['Organization: ' + args.product_mfr])
+  if not args.unbundled_apex:
+    doc.packages.append(product_package)
+
+  doc.packages.append(sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
+                                        name=sbom_data.PACKAGE_NAME_PLATFORM,
+                                        download_location=sbom_data.VALUE_NONE,
+                                        version=args.build_version,
+                                        supplier='Organization: ' + args.product_mfr))
+
+  # Report on some issues and information
+  report = {
+    ISSUE_NO_METADATA: [],
+    ISSUE_NO_METADATA_FILE: [],
+    ISSUE_METADATA_FILE_INCOMPLETE: [],
+    ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
+    ISSUE_INSTALLED_FILE_NOT_EXIST: [],
+    INFO_METADATA_FOUND_FOR_PACKAGE: [],
+  }
+
+  # Scan the metadata in CSV file and create the corresponding package and file records in SPDX
+  with open(args.metadata, newline='') as sbom_metadata_file:
+    reader = csv.DictReader(sbom_metadata_file)
+    for installed_file_metadata in reader:
+      installed_file = installed_file_metadata['installed_file']
+      module_path = installed_file_metadata['module_path']
+      product_copy_files = installed_file_metadata['product_copy_files']
+      kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+      build_output_path = installed_file_metadata['build_output_path']
+
+      if not installed_file_has_metadata(installed_file_metadata, report):
+        continue
+      if not (os.path.islink(build_output_path) or os.path.isfile(build_output_path)):
+        report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
+        continue
+
+      file_id = new_file_id(installed_file)
+      doc.files.append(
+        sbom_data.File(id=file_id, name=installed_file, checksum=checksum(build_output_path)))
+      if not args.unbundled_apex:
+        product_package.file_ids.append(file_id)
+      elif len(doc.files) > 1:
+          doc.add_relationship(sbom_data.Relationship(doc.files[0].id, sbom_data.RelationshipType.CONTAINS, file_id))
+
+      if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
+        metadata_file_path = get_metadata_file_path(installed_file_metadata)
+        report_metadata_file(metadata_file_path, installed_file_metadata, report)
+
+        # File from source fork packages or prebuilt fork packages
+        external_doc_ref, pkgs, rels = get_sbom_fragments(installed_file_metadata, metadata_file_path)
+        if len(pkgs) > 0:
+          if external_doc_ref:
+            doc.add_external_ref(external_doc_ref)
+          for p in pkgs:
+            doc.add_package(p)
+          for rel in rels:
+            doc.add_relationship(rel)
+          fork_package_id = pkgs[0].id  # The first package should be the source/prebuilt fork package
+          doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                      relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                      id2=fork_package_id))
+      elif module_path or installed_file_metadata['is_platform_generated']:
+        # File from PLATFORM package
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+      elif product_copy_files:
+        # Format of product_copy_files: <source path>:<dest path>
+        src_path = product_copy_files.split(':')[0]
+        # So far product_copy_files are copied from directory system, kernel, hardware, frameworks and device,
+        # so process them as files from PLATFORM package
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+      elif installed_file.endswith('.fsv_meta'):
+        # See build/make/core/Makefile:2988
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+      elif kernel_module_copy_files.startswith('ANDROID-GEN'):
+        # For the four files generated for _dlkm, _ramdisk partitions
+        # See build/make/core/Makefile:323
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+
+  if not args.unbundled_apex:
+    product_package.verification_code = generate_package_verification_code(doc.files)
+
+  if args.unbundled_apex:
+    doc.describes = doc.files[0].id
+
+  # Save SBOM records to output file
+  doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+  prefix = args.output_file
+  if prefix.endswith('.spdx'):
+    prefix = prefix.removesuffix('.spdx')
+  elif prefix.endswith('.spdx.json'):
+    prefix = prefix.removesuffix('.spdx.json')
+
+  output_file = prefix + '.spdx'
+  if args.unbundled_apex:
+    output_file = prefix + '-fragment.spdx'
+  with open(output_file, 'w', encoding="utf-8") as file:
+    sbom_writers.TagValueWriter.write(doc, file, fragment=args.unbundled_apex)
+  if args.json:
+    with open(prefix + '.spdx.json', 'w', encoding="utf-8") as file:
+      sbom_writers.JSONWriter.write(doc, file)
+
+  save_report(prefix + '-gen-report.txt', report)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/tools/sbom/sbom_data.py b/tools/sbom/sbom_data.py
new file mode 100644
index 0000000..14c4eb2
--- /dev/null
+++ b/tools/sbom/sbom_data.py
@@ -0,0 +1,124 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Define data classes that model SBOMs defined by SPDX. The data classes could be
+written out to different formats (tagvalue, JSON, etc) of SPDX with corresponding
+writer utilities.
+
+Refer to SPDX 2.3 spec: https://spdx.github.io/spdx-spec/v2.3/ and go/android-spdx for details of
+fields in each data class.
+"""
+
+from dataclasses import dataclass, field
+from typing import List
+
+SPDXID_DOC = 'SPDXRef-DOCUMENT'
+SPDXID_PRODUCT = 'SPDXRef-PRODUCT'
+SPDXID_PLATFORM = 'SPDXRef-PLATFORM'
+
+PACKAGE_NAME_PRODUCT = 'PRODUCT'
+PACKAGE_NAME_PLATFORM = 'PLATFORM'
+
+VALUE_NOASSERTION = 'NOASSERTION'
+VALUE_NONE = 'NONE'
+
+
+class PackageExternalRefCategory:
+  SECURITY = 'SECURITY'
+  PACKAGE_MANAGER = 'PACKAGE-MANAGER'
+  PERSISTENT_ID = 'PERSISTENT-ID'
+  OTHER = 'OTHER'
+
+
+class PackageExternalRefType:
+  cpe22Type = 'cpe22Type'
+  cpe23Type = 'cpe23Type'
+
+
+@dataclass
+class PackageExternalRef:
+  category: PackageExternalRefCategory
+  type: PackageExternalRefType
+  locator: str
+
+
+@dataclass
+class Package:
+  name: str
+  id: str
+  version: str = None
+  supplier: str = None
+  download_location: str = None
+  files_analyzed: bool = False
+  verification_code: str = None
+  file_ids: List[str] = field(default_factory=list)
+  external_refs: List[PackageExternalRef] = field(default_factory=list)
+
+
+@dataclass
+class File:
+  id: str
+  name: str
+  checksum: str
+
+
+class RelationshipType:
+  DESCRIBES = 'DESCRIBES'
+  VARIANT_OF = 'VARIANT_OF'
+  GENERATED_FROM = 'GENERATED_FROM'
+  CONTAINS = 'CONTAINS'
+
+
+@dataclass
+class Relationship:
+  id1: str
+  relationship: RelationshipType
+  id2: str
+
+
+@dataclass
+class DocumentExternalReference:
+  id: str
+  uri: str
+  checksum: str
+
+
+@dataclass
+class Document:
+  name: str
+  namespace: str
+  id: str = SPDXID_DOC
+  describes: str = SPDXID_PRODUCT
+  creators: List[str] = field(default_factory=list)
+  created: str = None
+  external_refs: List[DocumentExternalReference] = field(default_factory=list)
+  packages: List[Package] = field(default_factory=list)
+  files: List[File] = field(default_factory=list)
+  relationships: List[Relationship] = field(default_factory=list)
+
+  def add_external_ref(self, external_ref):
+    if not any(external_ref.uri == ref.uri for ref in self.external_refs):
+      self.external_refs.append(external_ref)
+
+  def add_package(self, package):
+    if not any(package.id == p.id for p in self.packages):
+      self.packages.append(package)
+
+  def add_relationship(self, rel):
+    if not any(rel.id1 == r.id1 and rel.id2 == r.id2 and rel.relationship == r.relationship
+               for r in self.relationships):
+      self.relationships.append(rel)
diff --git a/tools/sbom/sbom_writers.py b/tools/sbom/sbom_writers.py
new file mode 100644
index 0000000..85dee9d
--- /dev/null
+++ b/tools/sbom/sbom_writers.py
@@ -0,0 +1,369 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Serialize objects defined in package sbom_data to SPDX format: tagvalue, JSON.
+"""
+
+import json
+import sbom_data
+
+SPDX_VER = 'SPDX-2.3'
+DATA_LIC = 'CC0-1.0'
+
+
+class Tags:
+  # Common
+  SPDXID = 'SPDXID'
+  SPDX_VERSION = 'SPDXVersion'
+  DATA_LICENSE = 'DataLicense'
+  DOCUMENT_NAME = 'DocumentName'
+  DOCUMENT_NAMESPACE = 'DocumentNamespace'
+  CREATED = 'Created'
+  CREATOR = 'Creator'
+  EXTERNAL_DOCUMENT_REF = 'ExternalDocumentRef'
+
+  # Package
+  PACKAGE_NAME = 'PackageName'
+  PACKAGE_DOWNLOAD_LOCATION = 'PackageDownloadLocation'
+  PACKAGE_VERSION = 'PackageVersion'
+  PACKAGE_SUPPLIER = 'PackageSupplier'
+  FILES_ANALYZED = 'FilesAnalyzed'
+  PACKAGE_VERIFICATION_CODE = 'PackageVerificationCode'
+  PACKAGE_EXTERNAL_REF = 'ExternalRef'
+  # Package license
+  PACKAGE_LICENSE_CONCLUDED = 'PackageLicenseConcluded'
+  PACKAGE_LICENSE_INFO_FROM_FILES = 'PackageLicenseInfoFromFiles'
+  PACKAGE_LICENSE_DECLARED = 'PackageLicenseDeclared'
+  PACKAGE_LICENSE_COMMENTS = 'PackageLicenseComments'
+
+  # File
+  FILE_NAME = 'FileName'
+  FILE_CHECKSUM = 'FileChecksum'
+  # File license
+  FILE_LICENSE_CONCLUDED = 'LicenseConcluded'
+  FILE_LICENSE_INFO_IN_FILE = 'LicenseInfoInFile'
+  FILE_LICENSE_COMMENTS = 'LicenseComments'
+  FILE_COPYRIGHT_TEXT = 'FileCopyrightText'
+  FILE_NOTICE = 'FileNotice'
+  FILE_ATTRIBUTION_TEXT = 'FileAttributionText'
+
+  # Relationship
+  RELATIONSHIP = 'Relationship'
+
+
+class TagValueWriter:
+  @staticmethod
+  def marshal_doc_headers(sbom_doc):
+    headers = [
+      f'{Tags.SPDX_VERSION}: {SPDX_VER}',
+      f'{Tags.DATA_LICENSE}: {DATA_LIC}',
+      f'{Tags.SPDXID}: {sbom_doc.id}',
+      f'{Tags.DOCUMENT_NAME}: {sbom_doc.name}',
+      f'{Tags.DOCUMENT_NAMESPACE}: {sbom_doc.namespace}',
+    ]
+    for creator in sbom_doc.creators:
+      headers.append(f'{Tags.CREATOR}: {creator}')
+    headers.append(f'{Tags.CREATED}: {sbom_doc.created}')
+    for doc_ref in sbom_doc.external_refs:
+      headers.append(
+        f'{Tags.EXTERNAL_DOCUMENT_REF}: {doc_ref.id} {doc_ref.uri} {doc_ref.checksum}')
+    headers.append('')
+    return headers
+
+  @staticmethod
+  def marshal_package(package):
+    download_location = sbom_data.VALUE_NOASSERTION
+    if package.download_location:
+      download_location = package.download_location
+    tagvalues = [
+      f'{Tags.PACKAGE_NAME}: {package.name}',
+      f'{Tags.SPDXID}: {package.id}',
+      f'{Tags.PACKAGE_DOWNLOAD_LOCATION}: {download_location}',
+      f'{Tags.FILES_ANALYZED}: {str(package.files_analyzed).lower()}',
+    ]
+    if package.version:
+      tagvalues.append(f'{Tags.PACKAGE_VERSION}: {package.version}')
+    if package.supplier:
+      tagvalues.append(f'{Tags.PACKAGE_SUPPLIER}: {package.supplier}')
+    if package.verification_code:
+      tagvalues.append(f'{Tags.PACKAGE_VERIFICATION_CODE}: {package.verification_code}')
+    if package.external_refs:
+      for external_ref in package.external_refs:
+        tagvalues.append(
+          f'{Tags.PACKAGE_EXTERNAL_REF}: {external_ref.category} {external_ref.type} {external_ref.locator}')
+
+    tagvalues.append('')
+    return tagvalues
+
+  @staticmethod
+  def marshal_described_element(sbom_doc, fragment):
+    if not sbom_doc.describes:
+      return None
+
+    product_package = [p for p in sbom_doc.packages if p.id == sbom_doc.describes]
+    if product_package:
+      tagvalues = TagValueWriter.marshal_package(product_package[0])
+      if not fragment:
+        tagvalues.append(
+            f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
+
+      tagvalues.append('')
+      return tagvalues
+
+    file = [f for f in sbom_doc.files if f.id == sbom_doc.describes]
+    if file:
+      tagvalues = TagValueWriter.marshal_file(file[0])
+      if not fragment:
+        tagvalues.append(
+            f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
+
+      return tagvalues
+
+    return None
+
+  @staticmethod
+  def marshal_packages(sbom_doc):
+    tagvalues = []
+    marshaled_relationships = []
+    i = 0
+    packages = sbom_doc.packages
+    while i < len(packages):
+      if packages[i].id == sbom_doc.describes:
+        i += 1
+        continue
+
+      if i + 1 < len(packages) \
+          and packages[i].id.startswith('SPDXRef-SOURCE-') \
+          and packages[i + 1].id.startswith('SPDXRef-UPSTREAM-'):
+        tagvalues += TagValueWriter.marshal_package(packages[i])
+        tagvalues += TagValueWriter.marshal_package(packages[i + 1])
+        rel = next((r for r in sbom_doc.relationships if
+                    r.id1 == packages[i].id and
+                    r.id2 == packages[i + 1].id and
+                    r.relationship == sbom_data.RelationshipType.VARIANT_OF), None)
+        if rel:
+          marshaled_relationships.append(rel)
+          tagvalues.append(TagValueWriter.marshal_relationship(rel))
+          tagvalues.append('')
+
+        i += 2
+      else:
+        tagvalues += TagValueWriter.marshal_package(packages[i])
+        i += 1
+
+    return tagvalues, marshaled_relationships
+
+  @staticmethod
+  def marshal_file(file):
+    tagvalues = [
+      f'{Tags.FILE_NAME}: {file.name}',
+      f'{Tags.SPDXID}: {file.id}',
+      f'{Tags.FILE_CHECKSUM}: {file.checksum}',
+      '',
+    ]
+
+    return tagvalues
+
+  @staticmethod
+  def marshal_files(sbom_doc):
+    tagvalues = []
+    for file in sbom_doc.files:
+      if file.id == sbom_doc.describes:
+        continue
+      tagvalues += TagValueWriter.marshal_file(file)
+    return tagvalues
+
+  @staticmethod
+  def marshal_relationship(rel):
+    return f'{Tags.RELATIONSHIP}: {rel.id1} {rel.relationship} {rel.id2}'
+
+  @staticmethod
+  def marshal_relationships(sbom_doc, marshaled_rels):
+    tagvalues = []
+    sorted_rels = sorted(sbom_doc.relationships, key=lambda r: r.id2 + r.id1)
+    for rel in sorted_rels:
+      if any(r.id1 == rel.id1 and r.id2 == rel.id2 and r.relationship == rel.relationship
+             for r in marshaled_rels):
+        continue
+      tagvalues.append(TagValueWriter.marshal_relationship(rel))
+    tagvalues.append('')
+    return tagvalues
+
+  @staticmethod
+  def write(sbom_doc, file, fragment=False):
+    content = []
+    if not fragment:
+      content += TagValueWriter.marshal_doc_headers(sbom_doc)
+    described_element = TagValueWriter.marshal_described_element(sbom_doc, fragment)
+    if described_element:
+      content += described_element
+    content += TagValueWriter.marshal_files(sbom_doc)
+    tagvalues, marshaled_relationships = TagValueWriter.marshal_packages(sbom_doc)
+    content += tagvalues
+    content += TagValueWriter.marshal_relationships(sbom_doc, marshaled_relationships)
+    file.write('\n'.join(content))
+
+
+class PropNames:
+  # Common
+  SPDXID = 'SPDXID'
+  SPDX_VERSION = 'spdxVersion'
+  DATA_LICENSE = 'dataLicense'
+  NAME = 'name'
+  DOCUMENT_NAMESPACE = 'documentNamespace'
+  CREATION_INFO = 'creationInfo'
+  CREATORS = 'creators'
+  CREATED = 'created'
+  EXTERNAL_DOCUMENT_REF = 'externalDocumentRefs'
+  DOCUMENT_DESCRIBES = 'documentDescribes'
+  EXTERNAL_DOCUMENT_ID = 'externalDocumentId'
+  EXTERNAL_DOCUMENT_URI = 'spdxDocument'
+  EXTERNAL_DOCUMENT_CHECKSUM = 'checksum'
+  ALGORITHM = 'algorithm'
+  CHECKSUM_VALUE = 'checksumValue'
+
+  # Package
+  PACKAGES = 'packages'
+  PACKAGE_DOWNLOAD_LOCATION = 'downloadLocation'
+  PACKAGE_VERSION = 'versionInfo'
+  PACKAGE_SUPPLIER = 'supplier'
+  FILES_ANALYZED = 'filesAnalyzed'
+  PACKAGE_VERIFICATION_CODE = 'packageVerificationCode'
+  PACKAGE_VERIFICATION_CODE_VALUE = 'packageVerificationCodeValue'
+  PACKAGE_EXTERNAL_REFS = 'externalRefs'
+  PACKAGE_EXTERNAL_REF_CATEGORY = 'referenceCategory'
+  PACKAGE_EXTERNAL_REF_TYPE = 'referenceType'
+  PACKAGE_EXTERNAL_REF_LOCATOR = 'referenceLocator'
+  PACKAGE_HAS_FILES = 'hasFiles'
+
+  # File
+  FILES = 'files'
+  FILE_NAME = 'fileName'
+  FILE_CHECKSUMS = 'checksums'
+
+  # Relationship
+  RELATIONSHIPS = 'relationships'
+  REL_ELEMENT_ID = 'spdxElementId'
+  REL_RELATED_ELEMENT_ID = 'relatedSpdxElement'
+  REL_TYPE = 'relationshipType'
+
+
+class JSONWriter:
+  @staticmethod
+  def marshal_doc_headers(sbom_doc):
+    headers = {
+      PropNames.SPDX_VERSION: SPDX_VER,
+      PropNames.DATA_LICENSE: DATA_LIC,
+      PropNames.SPDXID: sbom_doc.id,
+      PropNames.NAME: sbom_doc.name,
+      PropNames.DOCUMENT_NAMESPACE: sbom_doc.namespace,
+      PropNames.CREATION_INFO: {}
+    }
+    creators = [creator for creator in sbom_doc.creators]
+    headers[PropNames.CREATION_INFO][PropNames.CREATORS] = creators
+    headers[PropNames.CREATION_INFO][PropNames.CREATED] = sbom_doc.created
+    external_refs = []
+    for doc_ref in sbom_doc.external_refs:
+      checksum = doc_ref.checksum.split(': ')
+      external_refs.append({
+        PropNames.EXTERNAL_DOCUMENT_ID: f'{doc_ref.id}',
+        PropNames.EXTERNAL_DOCUMENT_URI: doc_ref.uri,
+        PropNames.EXTERNAL_DOCUMENT_CHECKSUM: {
+          PropNames.ALGORITHM: checksum[0],
+          PropNames.CHECKSUM_VALUE: checksum[1]
+        }
+      })
+    if external_refs:
+      headers[PropNames.EXTERNAL_DOCUMENT_REF] = external_refs
+    headers[PropNames.DOCUMENT_DESCRIBES] = [sbom_doc.describes]
+
+    return headers
+
+  @staticmethod
+  def marshal_packages(sbom_doc):
+    packages = []
+    for p in sbom_doc.packages:
+      package = {
+        PropNames.NAME: p.name,
+        PropNames.SPDXID: p.id,
+        PropNames.PACKAGE_DOWNLOAD_LOCATION: p.download_location if p.download_location else sbom_data.VALUE_NOASSERTION,
+        PropNames.FILES_ANALYZED: p.files_analyzed
+      }
+      if p.version:
+        package[PropNames.PACKAGE_VERSION] = p.version
+      if p.supplier:
+        package[PropNames.PACKAGE_SUPPLIER] = p.supplier
+      if p.verification_code:
+        package[PropNames.PACKAGE_VERIFICATION_CODE] = {
+          PropNames.PACKAGE_VERIFICATION_CODE_VALUE: p.verification_code
+        }
+      if p.external_refs:
+        package[PropNames.PACKAGE_EXTERNAL_REFS] = []
+        for ref in p.external_refs:
+          ext_ref = {
+            PropNames.PACKAGE_EXTERNAL_REF_CATEGORY: ref.category,
+            PropNames.PACKAGE_EXTERNAL_REF_TYPE: ref.type,
+            PropNames.PACKAGE_EXTERNAL_REF_LOCATOR: ref.locator,
+          }
+          package[PropNames.PACKAGE_EXTERNAL_REFS].append(ext_ref)
+      if p.file_ids:
+        package[PropNames.PACKAGE_HAS_FILES] = []
+        for file_id in p.file_ids:
+          package[PropNames.PACKAGE_HAS_FILES].append(file_id)
+
+      packages.append(package)
+
+    return {PropNames.PACKAGES: packages}
+
+  @staticmethod
+  def marshal_files(sbom_doc):
+    files = []
+    for f in sbom_doc.files:
+      file = {
+        PropNames.FILE_NAME: f.name,
+        PropNames.SPDXID: f.id
+      }
+      checksum = f.checksum.split(': ')
+      file[PropNames.FILE_CHECKSUMS] = [{
+        PropNames.ALGORITHM: checksum[0],
+        PropNames.CHECKSUM_VALUE: checksum[1],
+      }]
+      files.append(file)
+    return {PropNames.FILES: files}
+
+  @staticmethod
+  def marshal_relationships(sbom_doc):
+    relationships = []
+    sorted_rels = sorted(sbom_doc.relationships, key=lambda r: r.relationship + r.id2 + r.id1)
+    for r in sorted_rels:
+      rel = {
+        PropNames.REL_ELEMENT_ID: r.id1,
+        PropNames.REL_RELATED_ELEMENT_ID: r.id2,
+        PropNames.REL_TYPE: r.relationship,
+      }
+      relationships.append(rel)
+
+    return {PropNames.RELATIONSHIPS: relationships}
+
+  @staticmethod
+  def write(sbom_doc, file):
+    doc = {}
+    doc.update(JSONWriter.marshal_doc_headers(sbom_doc))
+    doc.update(JSONWriter.marshal_packages(sbom_doc))
+    doc.update(JSONWriter.marshal_files(sbom_doc))
+    doc.update(JSONWriter.marshal_relationships(sbom_doc))
+    file.write(json.dumps(doc, indent=4))
diff --git a/tools/sbom/sbom_writers_test.py b/tools/sbom/sbom_writers_test.py
new file mode 100644
index 0000000..361dae6
--- /dev/null
+++ b/tools/sbom/sbom_writers_test.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import io
+import pathlib
+import unittest
+import sbom_data
+import sbom_writers
+
+BUILD_FINGER_PRINT = 'build_finger_print'
+SUPPLIER_GOOGLE = 'Organization: Google'
+SUPPLIER_UPSTREAM = 'Organization: upstream'
+
+SPDXID_PREBUILT_PACKAGE1 = 'SPDXRef-PREBUILT-package1'
+SPDXID_SOURCE_PACKAGE1 = 'SPDXRef-SOURCE-package1'
+SPDXID_UPSTREAM_PACKAGE1 = 'SPDXRef-UPSTREAM-package1'
+
+SPDXID_FILE1 = 'SPDXRef-file1'
+SPDXID_FILE2 = 'SPDXRef-file2'
+SPDXID_FILE3 = 'SPDXRef-file3'
+
+
+class SBOMWritersTest(unittest.TestCase):
+
+  def setUp(self):
+    # SBOM of a product
+    self.sbom_doc = sbom_data.Document(name='test doc',
+                                       namespace='http://www.google.com/sbom/spdx/android',
+                                       creators=[SUPPLIER_GOOGLE],
+                                       created='2023-03-31T22:17:58Z',
+                                       describes=sbom_data.SPDXID_PRODUCT)
+    self.sbom_doc.add_external_ref(
+      sbom_data.DocumentExternalReference(id='DocumentRef-external_doc_ref',
+                                          uri='external_doc_uri',
+                                          checksum='SHA1: 1234567890'))
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
+                        name=sbom_data.PACKAGE_NAME_PRODUCT,
+                        download_location=sbom_data.VALUE_NONE,
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        files_analyzed=True,
+                        verification_code='123456',
+                        file_ids=[SPDXID_FILE1, SPDXID_FILE2, SPDXID_FILE3]))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
+                        name=sbom_data.PACKAGE_NAME_PLATFORM,
+                        download_location=sbom_data.VALUE_NONE,
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        ))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE1,
+                        name='Prebuilt package1',
+                        download_location=sbom_data.VALUE_NONE,
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        ))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_SOURCE_PACKAGE1,
+                        name='Source package1',
+                        download_location=sbom_data.VALUE_NONE,
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        external_refs=[sbom_data.PackageExternalRef(
+                          category=sbom_data.PackageExternalRefCategory.SECURITY,
+                          type=sbom_data.PackageExternalRefType.cpe22Type,
+                          locator='cpe:/a:jsoncpp_project:jsoncpp:1.9.4')]
+                        ))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_UPSTREAM_PACKAGE1,
+                        name='Upstream package1',
+                        supplier=SUPPLIER_UPSTREAM,
+                        version='1.1',
+                        ))
+
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_SOURCE_PACKAGE1,
+                                                          relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                          id2=SPDXID_UPSTREAM_PACKAGE1))
+
+    self.sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1', checksum='SHA1: 11111'))
+    self.sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE2, name='/bin/file2', checksum='SHA1: 22222'))
+    self.sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE3, name='/bin/file3', checksum='SHA1: 33333'))
+
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=sbom_data.SPDXID_PLATFORM))
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE2,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=SPDXID_PREBUILT_PACKAGE1))
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE3,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=SPDXID_SOURCE_PACKAGE1
+                                                          ))
+
+    # SBOM fragment of an APK
+    self.unbundled_sbom_doc = sbom_data.Document(name='test doc',
+                                                 namespace='http://www.google.com/sbom/spdx/android',
+                                                 creators=[SUPPLIER_GOOGLE],
+                                                 created='2023-03-31T22:17:58Z',
+                                                 describes=SPDXID_FILE1)
+
+    self.unbundled_sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1.apk', checksum='SHA1: 11111'))
+    self.unbundled_sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_SOURCE_PACKAGE1,
+                        name='Unbundled apk package',
+                        download_location=sbom_data.VALUE_NONE,
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT))
+    self.unbundled_sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+                                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                                    id2=SPDXID_SOURCE_PACKAGE1))
+
+  def test_tagvalue_writer(self):
+    with io.StringIO() as output:
+      sbom_writers.TagValueWriter.write(self.sbom_doc, output)
+      expected_output = pathlib.Path('testdata/expected_tagvalue_sbom.spdx').read_text()
+      self.maxDiff = None
+      self.assertEqual(expected_output, output.getvalue())
+
+  def test_tagvalue_writer_unbundled(self):
+    with io.StringIO() as output:
+      sbom_writers.TagValueWriter.write(self.unbundled_sbom_doc, output, fragment=True)
+      expected_output = pathlib.Path('testdata/expected_tagvalue_sbom_unbundled.spdx').read_text()
+      self.maxDiff = None
+      self.assertEqual(expected_output, output.getvalue())
+
+  def test_json_writer(self):
+    with io.StringIO() as output:
+      sbom_writers.JSONWriter.write(self.sbom_doc, output)
+      expected_output = pathlib.Path('testdata/expected_json_sbom.spdx.json').read_text()
+      self.maxDiff = None
+      self.assertEqual(expected_output, output.getvalue())
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/tools/sbom/testdata/expected_json_sbom.spdx.json b/tools/sbom/testdata/expected_json_sbom.spdx.json
new file mode 100644
index 0000000..32715a5
--- /dev/null
+++ b/tools/sbom/testdata/expected_json_sbom.spdx.json
@@ -0,0 +1,137 @@
+{
+    "spdxVersion": "SPDX-2.3",
+    "dataLicense": "CC0-1.0",
+    "SPDXID": "SPDXRef-DOCUMENT",
+    "name": "test doc",
+    "documentNamespace": "http://www.google.com/sbom/spdx/android",
+    "creationInfo": {
+        "creators": [
+            "Organization: Google"
+        ],
+        "created": "2023-03-31T22:17:58Z"
+    },
+    "externalDocumentRefs": [
+        {
+            "externalDocumentId": "DocumentRef-external_doc_ref",
+            "spdxDocument": "external_doc_uri",
+            "checksum": {
+                "algorithm": "SHA1",
+                "checksumValue": "1234567890"
+            }
+        }
+    ],
+    "documentDescribes": [
+        "SPDXRef-PRODUCT"
+    ],
+    "packages": [
+        {
+            "name": "PRODUCT",
+            "SPDXID": "SPDXRef-PRODUCT",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": true,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google",
+            "packageVerificationCode": {
+                "packageVerificationCodeValue": "123456"
+            },
+            "hasFiles": [
+                "SPDXRef-file1",
+                "SPDXRef-file2",
+                "SPDXRef-file3"
+            ]
+        },
+        {
+            "name": "PLATFORM",
+            "SPDXID": "SPDXRef-PLATFORM",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": false,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google"
+        },
+        {
+            "name": "Prebuilt package1",
+            "SPDXID": "SPDXRef-PREBUILT-package1",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": false,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google"
+        },
+        {
+            "name": "Source package1",
+            "SPDXID": "SPDXRef-SOURCE-package1",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": false,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google",
+            "externalRefs": [
+                {
+                    "referenceCategory": "SECURITY",
+                    "referenceType": "cpe22Type",
+                    "referenceLocator": "cpe:/a:jsoncpp_project:jsoncpp:1.9.4"
+                }
+            ]
+        },
+        {
+            "name": "Upstream package1",
+            "SPDXID": "SPDXRef-UPSTREAM-package1",
+            "downloadLocation": "NOASSERTION",
+            "filesAnalyzed": false,
+            "versionInfo": "1.1",
+            "supplier": "Organization: upstream"
+        }
+    ],
+    "files": [
+        {
+            "fileName": "/bin/file1",
+            "SPDXID": "SPDXRef-file1",
+            "checksums": [
+                {
+                    "algorithm": "SHA1",
+                    "checksumValue": "11111"
+                }
+            ]
+        },
+        {
+            "fileName": "/bin/file2",
+            "SPDXID": "SPDXRef-file2",
+            "checksums": [
+                {
+                    "algorithm": "SHA1",
+                    "checksumValue": "22222"
+                }
+            ]
+        },
+        {
+            "fileName": "/bin/file3",
+            "SPDXID": "SPDXRef-file3",
+            "checksums": [
+                {
+                    "algorithm": "SHA1",
+                    "checksumValue": "33333"
+                }
+            ]
+        }
+    ],
+    "relationships": [
+        {
+            "spdxElementId": "SPDXRef-file1",
+            "relatedSpdxElement": "SPDXRef-PLATFORM",
+            "relationshipType": "GENERATED_FROM"
+        },
+        {
+            "spdxElementId": "SPDXRef-file2",
+            "relatedSpdxElement": "SPDXRef-PREBUILT-package1",
+            "relationshipType": "GENERATED_FROM"
+        },
+        {
+            "spdxElementId": "SPDXRef-file3",
+            "relatedSpdxElement": "SPDXRef-SOURCE-package1",
+            "relationshipType": "GENERATED_FROM"
+        },
+        {
+            "spdxElementId": "SPDXRef-SOURCE-package1",
+            "relatedSpdxElement": "SPDXRef-UPSTREAM-package1",
+            "relationshipType": "VARIANT_OF"
+        }
+    ]
+}
\ No newline at end of file
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom.spdx b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
new file mode 100644
index 0000000..ee39e82
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
@@ -0,0 +1,65 @@
+SPDXVersion: SPDX-2.3
+DataLicense: CC0-1.0
+SPDXID: SPDXRef-DOCUMENT
+DocumentName: test doc
+DocumentNamespace: http://www.google.com/sbom/spdx/android
+Creator: Organization: Google
+Created: 2023-03-31T22:17:58Z
+ExternalDocumentRef: DocumentRef-external_doc_ref external_doc_uri SHA1: 1234567890
+
+PackageName: PRODUCT
+SPDXID: SPDXRef-PRODUCT
+PackageDownloadLocation: NONE
+FilesAnalyzed: true
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+PackageVerificationCode: 123456
+
+Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-PRODUCT
+
+FileName: /bin/file1
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+FileName: /bin/file2
+SPDXID: SPDXRef-file2
+FileChecksum: SHA1: 22222
+
+FileName: /bin/file3
+SPDXID: SPDXRef-file3
+FileChecksum: SHA1: 33333
+
+PackageName: PLATFORM
+SPDXID: SPDXRef-PLATFORM
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Prebuilt package1
+SPDXID: SPDXRef-PREBUILT-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Source package1
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4
+
+PackageName: Upstream package1
+SPDXID: SPDXRef-UPSTREAM-package1
+PackageDownloadLocation: NOASSERTION
+FilesAnalyzed: false
+PackageVersion: 1.1
+PackageSupplier: Organization: upstream
+
+Relationship: SPDXRef-SOURCE-package1 VARIANT_OF SPDXRef-UPSTREAM-package1
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-PLATFORM
+Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
+Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
new file mode 100644
index 0000000..a00c291
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
@@ -0,0 +1,12 @@
+FileName: /bin/file1.apk
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+PackageName: Unbundled apk package
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-SOURCE-package1
diff --git a/tools/test_post_process_props.py b/tools/test_post_process_props.py
index 236f9ed..439fc9f 100644
--- a/tools/test_post_process_props.py
+++ b/tools/test_post_process_props.py
@@ -256,6 +256,7 @@
     with contextlib.redirect_stderr(stderr_redirect):
       props = PropList("hello")
       props.put("ro.board.first_api_level","25")
+      props.put("ro.build.version.codename", "REL")
 
       # ro.board.first_api_level must be less than or equal to the sdk version
       self.assertFalse(validate_grf_props(props, 20))
@@ -273,5 +274,10 @@
       # ro.board.api_level must be less than or equal to the sdk version
       self.assertFalse(validate_grf_props(props, 25))
 
+      # allow setting future api_level before release
+      props.get_all_props()[-2].make_as_comment()
+      props.put("ro.build.version.codename", "NonRel")
+      self.assertTrue(validate_grf_props(props, 24))
+
 if __name__ == '__main__':
     unittest.main(verbosity=2)