Merge "Revert "remove health library from VNDK""
diff --git a/Changes.md b/Changes.md
index 8979e30..daebd52 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,21 @@
 # Build System Changes for Android.mk Writers
 
+## Python 2 to 3 migration
+
+The path set when running builds now makes the `python` executable point to python 3,
+whereas in previous versions it pointed to python 2. If you still have python 2 scripts,
+you can change the shebang line to use `python2` explicitly. This only applies to
+scripts run directly from makefiles or from Soong genrules. This behavior can be
+temporarily overridden by setting the `BUILD_BROKEN_PYTHON_IS_PYTHON2` environment
+variable to `true`. It is only an environment variable and not a product config variable
+because product config sometimes calls python code.
+
+In addition, `python_*` soong modules no longer allow python 2. This can be temporarily
+overridden by setting the `BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES` product configuration
+variable to `true`.
+
+Python 2 is slated for complete removal in V.
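+
+For example (a sketch; the build invocation in the comment and the location of
+the product configuration line are illustrative, not prescribed):
+
+```make
+# Environment-variable override for scripts run from makefiles or genrules,
+# set when invoking the build, e.g.: BUILD_BROKEN_PYTHON_IS_PYTHON2=true m
+
+# Product configuration override to keep building python 2 `python_*` modules:
+BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES := true
+```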
+
 ## Stop referencing sysprop_library directly from cc modules
 
 For the migration to Bazel, we are no longer mapping sysprop_library targets
@@ -818,7 +834,7 @@
 
 ### Stop using clang property
 
-Clang has been deleted from Soong. To fix any build errors, remove the clang
+The clang property has been deleted from Soong. To fix any build errors, remove the clang
 property from affected Android.bp files using bpmodify.
 
 
diff --git a/OWNERS b/OWNERS
index 57d8994..97fda40 100644
--- a/OWNERS
+++ b/OWNERS
@@ -5,5 +5,3 @@
 per-file envsetup.sh = joeo@google.com, jingwen@google.com, lberki@google.com
 per-file shell_utils.sh = joeo@google.com, jingwen@google.com, lberki@google.com
 
-# Finalization scripts
-per-file finalize* = smoreland@google.com, alexbuy@google.com
diff --git a/core/Makefile b/core/Makefile
index 2595101..c4d5530 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -181,6 +181,7 @@
 
 ifeq ($(HOST_OS),linux)
 $(call dist-for-goals,sdk,$(API_FINGERPRINT))
+$(call dist-for-goals,droidcore,$(API_FINGERPRINT))
 endif
 
 INSTALLED_RECOVERYIMAGE_TARGET :=
@@ -473,7 +474,10 @@
     $(eval BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver) := $(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)))) \
   $(if $(filter false,$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver))),\
     $(eval BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver) :=),) \
-  $(call copy-many-files,$(call build-image-kernel-modules,$(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)),$(2),$(3),$(call intermediates-dir-for,PACKAGING,depmod_$(1)$(_sep)$(_kver)),$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),$(4),$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver)),$(_stripped_staging_dir),$(_kver),$(7),$(8)))) \
+  $(eval _files := $(call build-image-kernel-modules,$(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)),$(2),$(3),$(call intermediates-dir-for,PACKAGING,depmod_$(1)$(_sep)$(_kver)),$(BOARD_$(1)_KERNEL_MODULES_LOAD$(_sep)$(_kver)),$(4),$(BOARD_$(1)_KERNEL_MODULES_ARCHIVE$(_sep)$(_kver)),$(_stripped_staging_dir),$(_kver),$(7),$(8))) \
+  $(call copy-many-files,$(_files)) \
+  $(eval _modules := $(BOARD_$(1)_KERNEL_MODULES$(_sep)$(_kver)) ANDROID-GEN ANDROID-GEN ANDROID-GEN ANDROID-GEN) \
+  $(eval KERNEL_MODULE_COPY_FILES += $(join $(addsuffix :,$(_modules)),$(_files)))) \
 $(if $(_kver), \
   $(eval _dir := $(_kver)/), \
   $(eval _dir :=)) \
@@ -486,6 +490,7 @@
   $(eval $(call build-image-kernel-modules-blocklist-file, \
     $(BOARD_$(1)_KERNEL_MODULES_BLOCKLIST_FILE$(_sep)$(_kver)), \
     $(2)/lib/modules/$(_dir)modules.blocklist)) \
+  $(eval ALL_KERNEL_MODULES_BLOCKLIST += $(2)/lib/modules/$(_dir)modules.blocklist) \
   $(2)/lib/modules/$(_dir)modules.blocklist)
 endef
 
@@ -508,6 +513,15 @@
 endef
 
 # $(1): kernel module directory name (top is an out of band value for no directory)
+define build-vendor-kernel-ramdisk-recovery-load
+$(if $(filter top,$(1)),\
+  $(eval _kver :=)$(eval _sep :=),\
+  $(eval _kver := $(1))$(eval _sep :=_))\
+  $(if $(BOARD_VENDOR_KERNEL_RAMDISK_RECOVERY_KERNEL_MODULES_LOAD$(_sep)$(_kver)),\
+    $(call copy-many-files,$(call module-load-list-copy-paths,$(call intermediates-dir-for,PACKAGING,vendor_kernel_ramdisk_recovery_module_list$(_sep)$(_kver)),$(BOARD_VENDOR_KERNEL_RAMDISK_KERNEL_MODULES$(_sep)$(_kver)),$(BOARD_VENDOR_KERNEL_RAMDISK_RECOVERY_KERNEL_MODULES_LOAD$(_sep)$(_kver)),modules.load.recovery,$(TARGET_VENDOR_KERNEL_RAMDISK_OUT))))
+endef
+
+# $(1): kernel module directory name (top is an out of band value for no directory)
 define build-vendor-charger-load
 $(if $(filter top,$(1)),\
   $(eval _kver :=)$(eval _sep :=),\
@@ -578,6 +592,7 @@
   $(eval $(result_var) += $(call build-image-kernel-modules-dir,VENDOR_RAMDISK,$(output_dir),,modules.load,$(VENDOR_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR_KERNEL_RAMDISK,$(TARGET_VENDOR_KERNEL_RAMDISK_OUT),,modules.load,$(VENDOR_KERNEL_RAMDISK_STRIPPED_MODULE_STAGING_DIR),$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-recovery-load,$(kmd))) \
+  $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-kernel-ramdisk-recovery-load,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR,$(if $(filter true,$(BOARD_USES_VENDOR_DLKMIMAGE)),$(TARGET_OUT_VENDOR_DLKM),$(TARGET_OUT_VENDOR)),vendor,modules.load,$(VENDOR_STRIPPED_MODULE_STAGING_DIR),$(kmd),$(BOARD_SYSTEM_KERNEL_MODULES),system)) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-charger-load,$(kmd))) \
   $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(if $(filter true,$(BOARD_USES_ODM_DLKMIMAGE)),$(TARGET_OUT_ODM_DLKM),$(TARGET_OUT_ODM)),odm,modules.load,,$(kmd))) \
@@ -586,6 +601,19 @@
     $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-recovery-as-boot-load,$(kmd))),\
     $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,GENERIC_RAMDISK,$(TARGET_RAMDISK_OUT),,modules.load,,$(kmd)))))
 
+ifeq ($(BOARD_SYSTEM_KERNEL_MODULES),)
+ifneq ($(BOARD_SYSTEM_DLKM_SRC),)
+ifneq ($(wildcard $(BOARD_SYSTEM_DLKM_SRC)/*),)
+  SYSTEM_KERNEL_MODULES := $(shell find $(BOARD_SYSTEM_DLKM_SRC) -type f)
+  SRC_SYSTEM_KERNEL_MODULES := $(SYSTEM_KERNEL_MODULES)
+  DST_SYSTEM_KERNEL_MODULES := $(patsubst $(BOARD_SYSTEM_DLKM_SRC)/%,:$(TARGET_OUT_SYSTEM_DLKM)/%,$(SRC_SYSTEM_KERNEL_MODULES))
+  SYSTEM_KERNEL_MODULE_COPY_PAIRS := $(join $(SRC_SYSTEM_KERNEL_MODULES),$(DST_SYSTEM_KERNEL_MODULES))
+  ALL_DEFAULT_INSTALLED_MODULES += $(call copy-many-files,$(SYSTEM_KERNEL_MODULE_COPY_PAIRS))
+endif
+endif
+endif
+
+
 # -----------------------------------------------------------------
 # Cert-to-package mapping.  Used by the post-build signing tools.
 # Use a macro to add newline to each echo command
@@ -918,16 +946,19 @@
 RAMDISK_EXT := .gz
 endif
 
+# This file contains the description of the /dev nodes added to the generic ramdisk
+RAMDISK_NODE_LIST := $(PRODUCT_OUT)/ramdisk_node_list
+
 # We just build this directly to the install location.
 INSTALLED_RAMDISK_TARGET := $(BUILT_RAMDISK_TARGET)
 $(INSTALLED_RAMDISK_TARGET): PRIVATE_DIRS := debug_ramdisk dev metadata mnt proc second_stage_resources sys
-$(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(INTERNAL_RAMDISK_FILES) $(INSTALLED_FILES_FILE_RAMDISK) | $(COMPRESSION_COMMAND_DEPS)
+$(INSTALLED_RAMDISK_TARGET): $(MKBOOTFS) $(RAMDISK_NODE_LIST) $(INTERNAL_RAMDISK_FILES) $(INSTALLED_FILES_FILE_RAMDISK) | $(COMPRESSION_COMMAND_DEPS)
 	$(call pretty,"Target ramdisk: $@")
 	$(hide) mkdir -p $(addprefix $(TARGET_RAMDISK_OUT)/,$(PRIVATE_DIRS))
 ifeq (true,$(BOARD_USES_GENERIC_KERNEL_IMAGE))
 	$(hide) mkdir -p $(addprefix $(TARGET_RAMDISK_OUT)/first_stage_ramdisk/,$(PRIVATE_DIRS))
 endif
-	$(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RAMDISK_OUT) | $(COMPRESSION_COMMAND) > $@
+	$(hide) $(MKBOOTFS) -n $(RAMDISK_NODE_LIST) -d $(TARGET_OUT) $(TARGET_RAMDISK_OUT) | $(COMPRESSION_COMMAND) > $@
 
 $(call declare-1p-container,$(INSTALLED_RAMDISK_TARGET),)
 $(call declare-container-license-deps,$(INSTALLED_RAMDISK_TARGET),$(INTERNAL_RAMDISK_FILE),$(PRODUCT_OUT)/:/)
@@ -1262,7 +1293,7 @@
 	$(AVBTOOL) add_hash_footer \
 	    --image $@ \
 	    $(call get-partition-size-argument,$(BOARD_INIT_BOOT_IMAGE_PARTITION_SIZE)) \
-	    --partition_name boot $(INTERNAL_AVB_INIT_BOOT_SIGNING_ARGS) \
+	    --partition_name init_boot $(INTERNAL_AVB_INIT_BOOT_SIGNING_ARGS) \
 	    $(BOARD_AVB_INIT_BOOT_ADD_HASH_FOOTER_ARGS)
 
 $(call declare-1p-container,$(INSTALLED_INIT_BOOT_IMAGE_TARGET),)
@@ -1608,6 +1639,21 @@
 target_system_dlkm_notice_file_xml_gz := $(TARGET_OUT_INTERMEDIATES)/NOTICE_SYSTEM_DLKM.xml.gz
 installed_system_dlkm_notice_xml_gz := $(TARGET_OUT_SYSTEM_DLKM)/etc/NOTICE.xml.gz
 
+ALL_INSTALLED_NOTICE_FILES := \
+  $(installed_notice_html_or_xml_gz) \
+  $(installed_vendor_notice_xml_gz) \
+  $(installed_product_notice_xml_gz) \
+  $(installed_system_ext_notice_xml_gz) \
+  $(installed_odm_notice_xml_gz) \
+  $(installed_vendor_dlkm_notice_xml_gz) \
+  $(installed_odm_dlkm_notice_xml_gz) \
+  $(installed_system_dlkm_notice_xml_gz) \
+
+# $1 installed file path, e.g. out/target/product/vsoc_x86_64/system_ext/etc/NOTICE.xml.gz
+define is-notice-file
+$(if $(findstring $1,$(ALL_INSTALLED_NOTICE_FILES)),Y)
+endef
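+# Usage sketch, given the notice paths listed above:
+#   $(call is-notice-file,$(TARGET_OUT_SYSTEM_DLKM)/etc/NOTICE.xml.gz) -> Y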
+
 # Notice files are copied to TARGET_OUT_NOTICE_FILES as a side-effect of their module
 # being built. A notice xml file must depend on all modules that could potentially
 # install a license file relevant to it.
@@ -3034,15 +3080,19 @@
 	    --cert $$(PRIVATE_KEY).x509.pem \
 	    --key $$(PRIVATE_KEY).pk8
 
-ALL_DEFAULT_INSTALLED_MODULES += $(1)
+$(1).idsig: $(1)
+
+ALL_DEFAULT_INSTALLED_MODULES += $(1) $(1).idsig
 
 endef  # fsverity-generate-and-install-manifest-apk
 
 $(eval $(call fsverity-generate-and-install-manifest-apk, \
   $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk,system))
+ALL_FSVERITY_BUILD_MANIFEST_APK += $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk $(TARGET_OUT)/etc/security/fsverity/BuildManifest.apk.idsig
 ifdef BUILDING_SYSTEM_EXT_IMAGE
   $(eval $(call fsverity-generate-and-install-manifest-apk, \
     $(TARGET_OUT_SYSTEM_EXT)/etc/security/fsverity/BuildManifestSystemExt.apk,system_ext))
+  ALL_FSVERITY_BUILD_MANIFEST_APK += $(TARGET_OUT_SYSTEM_EXT)/etc/security/fsverity/BuildManifestSystemExt.apk $(TARGET_OUT_SYSTEM_EXT)/etc/security/fsverity/BuildManifestSystemExt.apk.idsig
 endif
 
 endif  # PRODUCT_FSVERITY_GENERATE_METADATA
@@ -3104,16 +3154,20 @@
 SYSTEM_LINKER_CONFIG := $(TARGET_OUT)/etc/linker.config.pb
 SYSTEM_LINKER_CONFIG_SOURCE := $(call intermediates-dir-for,ETC,system_linker_config)/system_linker_config
 $(SYSTEM_LINKER_CONFIG): PRIVATE_SYSTEM_LINKER_CONFIG_SOURCE := $(SYSTEM_LINKER_CONFIG_SOURCE)
-$(SYSTEM_LINKER_CONFIG) : $(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE) | conv_linker_config
+$(SYSTEM_LINKER_CONFIG): $(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE) | conv_linker_config
+	@echo Creating linker config: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
 	$(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $(PRIVATE_SYSTEM_LINKER_CONFIG_SOURCE) \
-	  --output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)"
+		--output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT)"
 	$(HOST_OUT_EXECUTABLES)/conv_linker_config append --source $@ --output $@ --key requireLibs \
-	 --value "$(foreach lib,$(LLNDK_MOVED_TO_APEX_LIBRARIES), $(lib).so)"
+		--value "$(foreach lib,$(LLNDK_MOVED_TO_APEX_LIBRARIES), $(lib).so)"
 
 $(call declare-1p-target,$(SYSTEM_LINKER_CONFIG),)
 $(call declare-license-deps,$(SYSTEM_LINKER_CONFIG),$(INTERNAL_SYSTEMIMAGE_FILES) $(SYSTEM_LINKER_CONFIG_SOURCE))
 
 FULL_SYSTEMIMAGE_DEPS += $(SYSTEM_LINKER_CONFIG)
+ALL_DEFAULT_INSTALLED_MODULES += $(SYSTEM_LINKER_CONFIG)
 
 # installed file list
 # Depending on anything that $(BUILT_SYSTEMIMAGE) depends on.
@@ -3486,17 +3540,21 @@
   INTERNAL_VENDORIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/lib/modules,/vendor_dlkm/lib/modules,vendor_dlkm.img)
 endif
 
-# Install vendor/etc/linker.config.pb when PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS is set
-ifneq ($(strip $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS)),)
+# Install vendor/etc/linker.config.pb with PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS and STUB_LIBRARIES
 vendor_linker_config_file := $(TARGET_OUT_VENDOR)/etc/linker.config.pb
 $(vendor_linker_config_file): private_linker_config_fragments := $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS)
-$(vendor_linker_config_file): $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS) | $(HOST_OUT_EXECUTABLES)/conv_linker_config
+$(vendor_linker_config_file): $(INTERNAL_VENDORIMAGE_FILES) $(PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS) | $(HOST_OUT_EXECUTABLES)/conv_linker_config
+	@echo Creating linker config: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
 	$(HOST_OUT_EXECUTABLES)/conv_linker_config proto \
 		--source $(call normalize-path-list,$(private_linker_config_fragments)) \
 		--output $@
+	$(HOST_OUT_EXECUTABLES)/conv_linker_config systemprovide --source $@ \
+		--output $@ --value "$(STUB_LIBRARIES)" --system "$(TARGET_OUT_VENDOR)"
 $(call define declare-0p-target,$(vendor_linker_config_file),)
 INTERNAL_VENDORIMAGE_FILES += $(vendor_linker_config_file)
-endif
+ALL_DEFAULT_INSTALLED_MODULES += $(vendor_linker_config_file)
 
 INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
 INSTALLED_FILES_JSON_VENDOR := $(INSTALLED_FILES_FILE_VENDOR:.txt=.json)
@@ -4696,7 +4754,7 @@
 check_vintf_all_deps += $(check_vintf_system_log)
 $(check_vintf_system_log): $(HOST_OUT_EXECUTABLES)/checkvintf $(check_vintf_system_deps)
 	@( $< --check-one --dirmap /system:$(TARGET_OUT) > $@ 2>&1 ) || ( cat $@ && exit 1 )
-$(call declare-0p-target,$(check_vintf_system_log))
+$(call declare-1p-target,$(check_vintf_system_log))
 check_vintf_system_log :=
 
 # -- Check framework manifest against frozen manifests for GSI targets. They need to be compatible.
@@ -4708,7 +4766,7 @@
 	@( $< --check --dirmap /system:$(TARGET_OUT) \
 	  $(VINTF_FRAMEWORK_MANIFEST_FROZEN_DIR) > $@ 2>&1 ) || ( cat $@ && exit 1 )
 
-$(call declare-0p-target,$(vintffm_log))
+$(call declare-1p-target,$(vintffm_log))
 
 endif # check_vintf_system_deps
 check_vintf_system_deps :=
@@ -4732,7 +4790,7 @@
 	  ( $< --check-one --dirmap /vendor:$(TARGET_OUT_VENDOR) --dirmap /apex:$(APEX_OUT) \
 	       --property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
 	       > $@ 2>&1 ) || ( cat $@ && exit 1 ); )
-$(call declare-0p-target,$(check_vintf_vendor_log))
+$(call declare-1p-target,$(check_vintf_vendor_log))
 check_vintf_vendor_log :=
 endif # check_vintf_vendor_deps
 check_vintf_vendor_deps :=
@@ -4754,8 +4812,8 @@
 $(BUILT_KERNEL_VERSION_FILE):
 	echo $(BOARD_KERNEL_VERSION) > $@
 
-$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
-$(call declare-0p-target,$(BUILT_KERNEL_VERSION_FILE))
+$(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
+$(call declare-license-metadata,$(BUILT_KERNEL_VERSION_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
 
 my_board_extracted_kernel := true
 endif # BOARD_KERNEL_VERSION
@@ -4780,7 +4838,7 @@
 	  --output-configs $@ \
 	  --output-release $(BUILT_KERNEL_VERSION_FILE)
 
-$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+$(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
 
 my_board_extracted_kernel := true
 endif # INSTALLED_KERNEL_TARGET
@@ -4801,7 +4859,7 @@
 	  --output-configs $@ \
 	  --output-release $(BUILT_KERNEL_VERSION_FILE)
 
-$(call declare-0p-target,$(BUILT_KERNEL_CONFIGS_FILE))
+$(call declare-license-metadata,$(BUILT_KERNEL_CONFIGS_FILE),SPDX-license-identifier-GPL-2.0-only,restricted,$(BUILD_SYSTEM)/LINUX_KERNEL_COPYING,"Kernel",kernel)
 
 my_board_extracted_kernel := true
 endif # INSTALLED_BOOTIMAGE_TARGET
@@ -4893,7 +4951,7 @@
 	       --property ro.boot.product.vendor.sku=$(filter-out EMPTY_VENDOR_SKU_PLACEHOLDER,$(vendor_sku)) \
 	       >> $@ 2>&1 ) || (cat $@ && exit 1); ))
 
-$(call declare-0p-target,$(check_vintf_compatible_log))
+$(call declare-1p-target,$(check_vintf_compatible_log))
 
 check_vintf_compatible_log :=
 check_vintf_compatible_args :=
@@ -4958,7 +5016,7 @@
 	  $(call intermediates-dir-for,PACKAGING,check-all-partition-sizes)/misc_info.txt, \
 	  $@)
 
-$(call declare-0p-target,$(check_all_partition_sizes_log))
+$(call declare-1p-target,$(check_all_partition_sizes_log))
 
 .PHONY: check-all-partition-sizes
 check-all-partition-sizes: $(check_all_partition_sizes_log)
@@ -5060,6 +5118,7 @@
   img2simg \
   img_from_target_files \
   imgdiff \
+  initrd_bootconfig \
   libconscrypt_openjdk_jni \
   lpmake \
   lpunpack \
@@ -5100,6 +5159,8 @@
   verity_verifier \
   zipalign \
   zucchini \
+  zip2zip \
+
 
 # Additional tools to unpack and repack the apex file.
 INTERNAL_OTATOOLS_MODULES += \
@@ -5110,13 +5171,14 @@
   debugfs_static \
   dump_apex_info \
   fsck.erofs \
+  make_erofs \
   merge_zips \
   resize2fs \
   soong_zip \
 
 ifeq (true,$(PRODUCT_SUPPORTS_VBOOT))
 INTERNAL_OTATOOLS_MODULES += \
-  futility \
+  futility-host \
   vboot_signer
 endif
 
@@ -6132,6 +6194,7 @@
 ifdef BUILDING_INIT_BOOT_IMAGE
 	$(hide) $(call package_files-copy-root, $(TARGET_RAMDISK_OUT),$(zip_root)/INIT_BOOT/RAMDISK)
 	$(hide) $(call fs_config,$(zip_root)/INIT_BOOT/RAMDISK,) > $(zip_root)/META/init_boot_filesystem_config.txt
+	$(hide) cp $(RAMDISK_NODE_LIST) $(zip_root)/META/ramdisk_node_list
 ifdef BOARD_KERNEL_PAGESIZE
 	$(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/INIT_BOOT/pagesize
 endif # BOARD_KERNEL_PAGESIZE
diff --git a/core/OWNERS b/core/OWNERS
index 762d2a7..eb1d5c3 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -1,6 +1,3 @@
-per-file *dex_preopt*.* = jiakaiz@google.com,ngeoffray@google.com,skvadrik@google.com
-per-file art_*.* = jiakaiz@google.com,ngeoffray@google.com,skvadrik@google.com
-per-file verify_uses_libraries.sh = ngeoffray@google.com,skvadrik@google.com
 
 # For global Proguard rules
 per-file proguard*.flags = jdduke@google.com
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index bf113ee..6f0706e 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -71,12 +71,17 @@
 
 $(call soong_config_set,art_module,source_build,$(ART_MODULE_BUILD_FROM_SOURCE))
 
+ifdef TARGET_BOARD_AUTO
+  $(call add_soong_config_var_value, ANDROID, target_board_auto, $(TARGET_BOARD_AUTO))
+endif
+
 # Ensure that those mainline modules who have individually toggleable prebuilts
 # are controlled by the MODULE_BUILD_FROM_SOURCE environment variable by
 # default.
 INDIVIDUALLY_TOGGLEABLE_PREBUILT_MODULES := \
   bluetooth \
   permission \
+  rkpd \
   uwb \
   wifi \
 
@@ -106,6 +111,10 @@
 SYSTEMUI_OPTIMIZE_JAVA ?= true
 $(call add_soong_config_var,ANDROID,SYSTEMUI_OPTIMIZE_JAVA)
 
+# Disable Compose in SystemUI by default.
+SYSTEMUI_USE_COMPOSE ?= false
+$(call add_soong_config_var,ANDROID,SYSTEMUI_USE_COMPOSE)
+
 ifdef PRODUCT_AVF_ENABLED
 $(call add_soong_config_var_value,ANDROID,avf_enabled,$(PRODUCT_AVF_ENABLED))
 endif
diff --git a/core/app_prebuilt_internal.mk b/core/app_prebuilt_internal.mk
index eb429cd..9fab44d 100644
--- a/core/app_prebuilt_internal.mk
+++ b/core/app_prebuilt_internal.mk
@@ -302,3 +302,7 @@
 
 endif # LOCAL_PACKAGE_SPLITS
 
+###########################################################
+## SBOM generation
+###########################################################
+include $(BUILD_SBOM_GEN)
\ No newline at end of file
diff --git a/core/base_rules.mk b/core/base_rules.mk
index 18730aa..c453469 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -716,6 +716,15 @@
 ## Compatibility suite files.
 ###########################################################
 ifdef LOCAL_COMPATIBILITY_SUITE
+
+ifneq (,$(LOCAL_FULL_TEST_CONFIG))
+  test_config := $(LOCAL_FULL_TEST_CONFIG)
+else ifneq (,$(LOCAL_TEST_CONFIG))
+  test_config := $(LOCAL_PATH)/$(LOCAL_TEST_CONFIG)
+else
+  test_config := $(wildcard $(LOCAL_PATH)/AndroidTest.xml)
+endif
+
 ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
 
 # If we are building a native test or benchmark and its stem variants are not defined,
@@ -762,13 +771,6 @@
 
 
 # Auto-generate build config.
-ifneq (,$(LOCAL_FULL_TEST_CONFIG))
-  test_config := $(LOCAL_FULL_TEST_CONFIG)
-else ifneq (,$(LOCAL_TEST_CONFIG))
-  test_config := $(LOCAL_PATH)/$(LOCAL_TEST_CONFIG)
-else
-  test_config := $(wildcard $(LOCAL_PATH)/AndroidTest.xml)
-endif
 ifeq (,$(test_config))
   ifneq (true,$(is_native))
     is_instrumentation_test := true
@@ -847,16 +849,6 @@
   endif
 endif # $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
 
-# HACK: pretend a soong LOCAL_FULL_TEST_CONFIG is autogenerated by setting the flag in
-# module-info.json
-# TODO: (b/113029686) Add explicit flag from Soong to determine if a test was
-# autogenerated.
-ifneq (,$(filter $(SOONG_OUT_DIR)%,$(LOCAL_FULL_TEST_CONFIG)))
-  ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
-    ALL_MODULES.$(my_register_name).auto_test_config := true
-  endif
-endif
-
 
 ifeq ($(use_testcase_folder),true)
 ifneq ($(my_test_data_file_pairs),)
@@ -897,6 +889,17 @@
   $(eval my_compat_dist_test_data_$(suite) := ))
 
 endif  # LOCAL_UNINSTALLABLE_MODULE
+
+# HACK: pretend a soong LOCAL_FULL_TEST_CONFIG is autogenerated by setting the flag in
+# module-info.json
+# TODO: (b/113029686) Add explicit flag from Soong to determine if a test was
+# autogenerated.
+ifneq (,$(filter $(SOONG_OUT_DIR)%,$(LOCAL_FULL_TEST_CONFIG)))
+  ifeq ($(LOCAL_MODULE_MAKEFILE),$(SOONG_ANDROID_MK))
+    ALL_MODULES.$(my_register_name).auto_test_config := true
+  endif
+endif
+
 endif  # LOCAL_COMPATIBILITY_SUITE
 
 my_supported_variant :=
@@ -946,6 +949,8 @@
     $(ALL_MODULES.$(my_register_name).CHECKED) $(my_checked_module)
 ALL_MODULES.$(my_register_name).BUILT := \
     $(ALL_MODULES.$(my_register_name).BUILT) $(LOCAL_BUILT_MODULE)
+ALL_MODULES.$(my_register_name).SOONG_MODULE_TYPE := \
+    $(ALL_MODULES.$(my_register_name).SOONG_MODULE_TYPE) $(LOCAL_SOONG_MODULE_TYPE)
 ifndef LOCAL_IS_HOST_MODULE
 ALL_MODULES.$(my_register_name).TARGET_BUILT := \
     $(ALL_MODULES.$(my_register_name).TARGET_BUILT) $(LOCAL_BUILT_MODULE)
@@ -1237,3 +1242,8 @@
 ###########################################################
 
 include $(BUILD_NOTICE_FILE)
+
+###########################################################
+## SBOM generation
+###########################################################
+include $(BUILD_SBOM_GEN)
\ No newline at end of file
diff --git a/core/binary.mk b/core/binary.mk
index 6320726..6f1d814 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -58,6 +58,9 @@
 my_cppflags := $(LOCAL_CPPFLAGS)
 my_cflags_no_override := $(GLOBAL_CLANG_CFLAGS_NO_OVERRIDE)
 my_cppflags_no_override := $(GLOBAL_CLANG_CPPFLAGS_NO_OVERRIDE)
+ifeq ($(my_32_64_bit_suffix), 64)
+  my_cflags_no_override += $(GLOBAL_CLANG_CFLAGS_64_NO_OVERRIDE)
+endif
 ifdef is_third_party
     my_cflags_no_override += $(GLOBAL_CLANG_EXTERNAL_CFLAGS_NO_OVERRIDE)
     my_cppflags_no_override += $(GLOBAL_CLANG_EXTERNAL_CFLAGS_NO_OVERRIDE)
diff --git a/core/board_config.mk b/core/board_config.mk
index 70c91a8..b1b7b81 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -188,6 +188,7 @@
   BUILD_BROKEN_PREBUILT_ELF_FILES \
   BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW \
   BUILD_BROKEN_USES_NETWORK \
+  BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES \
   BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE \
   BUILD_BROKEN_VINTF_PRODUCT_COPY_FILES \
 
@@ -995,19 +996,13 @@
 endif
 
 ###########################################
-# APEXes are by default flattened, i.e. non-updatable, if not building unbundled
-# apps. It can be unflattened (and updatable) by inheriting from
-# updatable_apex.mk
+# APEXes are by default not flattened, i.e. updatable.
 #
 # APEX flattening can also be forcibly enabled (resp. disabled) by
 # setting OVERRIDE_TARGET_FLATTEN_APEX to true (resp. false), e.g. by
 # setting the OVERRIDE_TARGET_FLATTEN_APEX environment variable.
 ifdef OVERRIDE_TARGET_FLATTEN_APEX
   TARGET_FLATTEN_APEX := $(OVERRIDE_TARGET_FLATTEN_APEX)
-else
-  ifeq (,$(TARGET_BUILD_APPS)$(TARGET_FLATTEN_APEX))
-    TARGET_FLATTEN_APEX := true
-  endif
 endif
 
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
diff --git a/core/clang/OWNERS b/core/clang/OWNERS
deleted file mode 100644
index d41d3fc..0000000
--- a/core/clang/OWNERS
+++ /dev/null
@@ -1,4 +0,0 @@
-chh@google.com
-pirama@google.com
-srhines@google.com
-yikong@google.com
diff --git a/core/clang/config.mk b/core/clang/config.mk
index 28a75ec..d03c541 100644
--- a/core/clang/config.mk
+++ b/core/clang/config.mk
@@ -2,7 +2,7 @@
 
 LLVM_READOBJ := $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/bin/llvm-readobj
 
-LLVM_RTLIB_PATH := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib64/clang/$(LLVM_RELEASE_VERSION)/lib/linux/
+LLVM_RTLIB_PATH := $(LLVM_PREBUILTS_BASE)/linux-x86/$(LLVM_PREBUILTS_VERSION)/lib/clang/$(LLVM_RELEASE_VERSION)/lib/linux/
 
 define convert-to-clang-flags
 $(strip $(filter-out $(CLANG_CONFIG_UNKNOWN_CFLAGS),$(1)))
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index e325760..8913ad0 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -293,6 +293,7 @@
 LOCAL_SOONG_LICENSE_METADATA :=
 LOCAL_SOONG_LINK_TYPE :=
 LOCAL_SOONG_LINT_REPORTS :=
+LOCAL_SOONG_MODULE_TYPE :=
 LOCAL_SOONG_PROGUARD_DICT :=
 LOCAL_SOONG_PROGUARD_USAGE_ZIP :=
 LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
diff --git a/core/combo/arch/arm64/armv9-a.mk b/core/combo/arch/arm64/armv9-a.mk
new file mode 100644
index 0000000..de0760a
--- /dev/null
+++ b/core/combo/arch/arm64/armv9-a.mk
@@ -0,0 +1,19 @@
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# .mk file required to support builds for the new armv9-a Arm64 arch
+# variant. The file just needs to be present, but it does not need to contain
+# anything.
diff --git a/core/config.mk b/core/config.mk
index f5bb7ef..a6266b8 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -232,6 +232,7 @@
 BUILD_FUZZ_TEST :=$= $(BUILD_SYSTEM)/fuzz_test.mk
 
 BUILD_NOTICE_FILE :=$= $(BUILD_SYSTEM)/notice_files.mk
+BUILD_SBOM_GEN :=$= $(BUILD_SYSTEM)/sbom.mk
 
 include $(BUILD_SYSTEM)/deprecation.mk
 
@@ -431,6 +432,9 @@
 $(hide) $(HOST_NM) -gP $(1) | cut -f1-2 -d" " | (grep -v U$$ >> $(2) || true)
 endef
 
+# Pick a Java compiler.
+include $(BUILD_SYSTEM)/combo/javac.mk
+
 ifeq ($(CALLED_FROM_SETUP),true)
 include $(BUILD_SYSTEM)/ccache.mk
 include $(BUILD_SYSTEM)/goma.mk
@@ -453,9 +457,6 @@
   WITH_TIDY_ONLY :=
 endif
 
-# Pick a Java compiler.
-include $(BUILD_SYSTEM)/combo/javac.mk
-
 # ---------------------------------------------------------------
 # Check that the configuration is current.  We check that
 # BUILD_ENV_SEQUENCE_NUMBER is current against this value.
@@ -581,7 +582,6 @@
 endif
 PROTOC := $(HOST_OUT_EXECUTABLES)/aprotoc$(HOST_EXECUTABLE_SUFFIX)
 NANOPB_SRCS := $(HOST_OUT_EXECUTABLES)/protoc-gen-nanopb
-VTSC := $(HOST_OUT_EXECUTABLES)/vtsc$(HOST_EXECUTABLE_SUFFIX)
 MKBOOTFS := $(HOST_OUT_EXECUTABLES)/mkbootfs$(HOST_EXECUTABLE_SUFFIX)
 MINIGZIP := $(HOST_OUT_EXECUTABLES)/minigzip$(HOST_EXECUTABLE_SUFFIX)
 LZ4 := $(HOST_OUT_EXECUTABLES)/lz4$(HOST_EXECUTABLE_SUFFIX)
@@ -618,7 +618,11 @@
 LPMAKE := $(HOST_OUT_EXECUTABLES)/lpmake$(HOST_EXECUTABLE_SUFFIX)
 ADD_IMG_TO_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/add_img_to_target_files$(HOST_EXECUTABLE_SUFFIX)
 BUILD_IMAGE := $(HOST_OUT_EXECUTABLES)/build_image$(HOST_EXECUTABLE_SUFFIX)
+ifeq (,$(strip $(BOARD_CUSTOM_BUILD_SUPER_IMAGE)))
 BUILD_SUPER_IMAGE := $(HOST_OUT_EXECUTABLES)/build_super_image$(HOST_EXECUTABLE_SUFFIX)
+else
+BUILD_SUPER_IMAGE := $(BOARD_CUSTOM_BUILD_SUPER_IMAGE)
+endif
 IMG_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/img_from_target_files$(HOST_EXECUTABLE_SUFFIX)
 MAKE_RECOVERY_PATCH := $(HOST_OUT_EXECUTABLES)/make_recovery_patch$(HOST_EXECUTABLE_SUFFIX)
 OTA_FROM_TARGET_FILES := $(HOST_OUT_EXECUTABLES)/ota_from_target_files$(HOST_EXECUTABLE_SUFFIX)
@@ -641,6 +645,8 @@
 DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump$(BUILD_EXECUTABLE_SUFFIX)
 PROFMAN := $(HOST_OUT_EXECUTABLES)/profman
 
+GEN_SBOM := $(HOST_OUT_EXECUTABLES)/generate-sbom
+
 FINDBUGS_DIR := external/owasp/sanitizer/tools/findbugs/bin
 FINDBUGS := $(FINDBUGS_DIR)/findbugs
 
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 88ec47f..62c3ba3 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -93,6 +93,7 @@
 
 $(call dist-for-goals, droidcore, $(boot_zip))
 
+ifneq (,$(filter true,$(ART_MODULE_BUILD_FROM_SOURCE) $(MODULE_BUILD_FROM_SOURCE)))
 # Build the system_server.zip which contains the Apex system server jars and standalone system server jars
 system_server_zip := $(PRODUCT_OUT)/system_server.zip
 apex_system_server_jars := \
@@ -122,5 +123,6 @@
 
 $(call dist-for-goals, droidcore, $(system_server_zip))
 
+endif  #ART_MODULE_BUILD_FROM_SOURCE || MODULE_BUILD_FROM_SOURCE
 endif  #PRODUCT_USES_DEFAULT_ART_CONFIG
 endif  #WITH_DEXPREOPT
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index b303b52..d498875 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -272,12 +272,13 @@
 my_dexpreopt_images_deps :=
 my_dexpreopt_image_locations_on_host :=
 my_dexpreopt_image_locations_on_device :=
-# Infix can be 'boot' or 'art'. Soong creates a set of variables for Make, one
-# for each boot image (primary and the framework extension). The only reason why
-# the primary image is exposed to Make is testing (art gtests) and benchmarking
-# (art golem benchmarks). Install rules that use those variables are in
-# dex_preopt_libart.mk. Here for dexpreopt purposes the infix is always 'boot'.
-my_dexpreopt_infix := boot
+# Infix can be 'art', 'boot', or 'mainline'. Soong creates a set of variables
+# for Make, one for each boot image (primary, the framework extension, and the
+# mainline extension). The only reason why the primary image is exposed to Make
+# is testing (art gtests) and benchmarking (art golem benchmarks). Install rules
+# that use those variables are in dex_preopt_libart.mk. Here for dexpreopt
+# purposes the infix is always 'boot' or 'mainline'.
+my_dexpreopt_infix := $(if $(filter true,$(DEX_PREOPT_WITH_UPDATABLE_BCP)),mainline,boot)
 my_create_dexpreopt_config :=
 
 ifdef LOCAL_DEX_PREOPT
@@ -447,6 +448,7 @@
 
   my_dexpreopt_script := $(intermediates)/dexpreopt.sh
   my_dexpreopt_zip := $(intermediates)/dexpreopt.zip
+  DEXPREOPT.$(LOCAL_MODULE).POST_INSTALLED_DEXPREOPT_ZIP := $(my_dexpreopt_zip)
   .KATI_RESTAT: $(my_dexpreopt_script)
   $(my_dexpreopt_script): PRIVATE_MODULE := $(LOCAL_MODULE)
   $(my_dexpreopt_script): PRIVATE_GLOBAL_SOONG_CONFIG := $(DEX_PREOPT_SOONG_CONFIG_FOR_MAKE)
@@ -506,4 +508,4 @@
   my_dexpreopt_zip :=
   my_dexpreopt_config_for_postprocessing :=
 endif # LOCAL_DEX_PREOPT
-endif # my_create_dexpreopt_config
\ No newline at end of file
+endif # my_create_dexpreopt_config
diff --git a/core/main.mk b/core/main.mk
index 3866037..22a69d8 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -1345,6 +1345,13 @@
                   $(if $(ALL_MODULES.$(m).INSTALLED),\
                     $(if $(filter-out $(HOST_OUT_ROOT)/%,$(ALL_MODULES.$(m).INSTALLED)),,\
                       $(m))))
+    ifeq ($(TARGET_ARCH),riscv64)
+      # HACK: riscv64 can't build the device version of bcc and ld.mc due to a
+      # dependency on an old version of LLVM, but they are listed in
+      # base_system.mk which can't add them conditionally based on the target
+      # architecture.
+      _host_modules := $(filter-out bcc ld.mc,$(_host_modules))
+    endif
     $(call maybe-print-list-and-error,$(sort $(_host_modules)),\
       Host modules should be in PRODUCT_HOST_PACKAGES$(comma) not PRODUCT_PACKAGES)
   endif
@@ -2019,6 +2026,84 @@
 # missing dependency errors.
 $(call build-license-metadata)
 
+# Generate SBOM in SPDX format
+product_copy_files_without_owner := $(foreach pcf,$(PRODUCT_COPY_FILES),$(call word-colon,1,$(pcf)):$(call word-colon,2,$(pcf)))
+ifeq ($(TARGET_BUILD_APPS),)
+dest_files_without_source := $(sort $(foreach pcf,$(product_copy_files_without_owner),$(if $(wildcard $(call word-colon,1,$(pcf))),,$(call word-colon,2,$(pcf)))))
+dest_files_without_source := $(addprefix $(PRODUCT_OUT)/,$(dest_files_without_source))
+installed_files := $(sort $(filter-out $(PRODUCT_OUT)/apex/% $(PRODUCT_OUT)/fake_packages/% $(PRODUCT_OUT)/testcases/% $(dest_files_without_source),$(filter $(PRODUCT_OUT)/%,$(modules_to_install))))
+else
+installed_files := $(apps_only_installed_files)
+endif
+
+# sbom-metadata.csv contains all raw data collected in Make for generating SBOM in generate-sbom.py.
+# There are multiple columns and each identifies the source of an installed file for a specific case.
+# The columns and their uses are described as below:
+#   installed_file: the file path on device, e.g. /product/app/Browser2/Browser2.apk
+#   module_path: the path of the module that generates the installed file, e.g. packages/apps/Browser2
+#   soong_module_type: Soong module type, e.g. android_app, cc_binary
+#   is_prebuilt_make_module: Y, if the installed file is from a prebuilt Make module, see prebuilt_internal.mk
+#   product_copy_files: the installed file is from variable PRODUCT_COPY_FILES, e.g. device/google/cuttlefish/shared/config/init.product.rc:product/etc/init/init.rc
+#   kernel_module_copy_files: the installed file is from variable KERNEL_MODULE_COPY_FILES, similar to product_copy_files
+#   is_platform_generated: an aggregated value covering several smaller cases rather than adding more columns. It is set to Y if any of the cases below is Y
+#       is_build_prop: build.prop in each partition, see sysprop.mk.
+#       is_notice_file: NOTICE.xml.gz in each partition, see Makefile.
+#       is_dexpreopt_image_profile: see the usage of DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED in Soong and Make
+#       is_product_system_other_avbkey: see INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET
+#       is_system_other_odex_marker: see INSTALLED_SYSTEM_OTHER_ODEX_MARKER
+#       is_event_log_tags_file: see variable event_log_tags_file in Makefile
+#       is_kernel_modules_blocklist: modules.blocklist created for _dlkm partitions, see macro build-image-kernel-modules-dir in Makefile.
+#       is_fsverity_build_manifest_apk: BuildManifest<part>.apk files for system and system_ext partition, see ALL_FSVERITY_BUILD_MANIFEST_APK in Makefile.
+#       is_linker_config: see SYSTEM_LINKER_CONFIG and vendor_linker_config_file in Makefile.
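+#
+# An illustrative row (values hypothetical, columns in the order listed above):
+#   /product/app/Browser2/Browser2.apk,packages/apps/Browser2,android_app,,,,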
+
+# (TODO: b/272358583 find another way of always rebuilding this target)
+# Remove the sbom-metadata.csv whenever the makefile is evaluated
+$(shell rm $(PRODUCT_OUT)/sbom-metadata.csv >/dev/null 2>&1)
+$(PRODUCT_OUT)/sbom-metadata.csv: $(installed_files)
+	rm -f $@
+	@echo installed_file$(comma)module_path$(comma)soong_module_type$(comma)is_prebuilt_make_module$(comma)product_copy_files$(comma)kernel_module_copy_files$(comma)is_platform_generated >> $@
+	$(foreach f,$(installed_files),\
+	  $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \
+	  $(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$f)) \
+	  $(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
+	  $(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \
+	  $(eval _is_prebuilt_make_module := $(ALL_MODULES.$(_module_name).IS_PREBUILT_MAKE_MODULE)) \
+	  $(eval _post_installed_dexpreopt_zip := $(DEXPREOPT.$(_module_name).POST_INSTALLED_DEXPREOPT_ZIP)) \
+	  $(eval _product_copy_files := $(sort $(filter %:$(_path_on_device),$(product_copy_files_without_owner)))) \
+	  $(eval _kernel_module_copy_files := $(sort $(filter %$(_path_on_device),$(KERNEL_MODULE_COPY_FILES)))) \
+	  $(eval _is_build_prop := $(call is-build-prop,$f)) \
+	  $(eval _is_notice_file := $(call is-notice-file,$f)) \
+	  $(eval _is_dexpreopt_image_profile := $(if $(filter %:/$(_path_on_device),$(DEXPREOPT_IMAGE_PROFILE_BUILT_INSTALLED)),Y)) \
+	  $(eval _is_product_system_other_avbkey := $(if $(findstring $f,$(INSTALLED_PRODUCT_SYSTEM_OTHER_AVBKEY_TARGET)),Y)) \
+	  $(eval _is_event_log_tags_file := $(if $(findstring $f,$(event_log_tags_file)),Y)) \
+	  $(eval _is_system_other_odex_marker := $(if $(findstring $f,$(INSTALLED_SYSTEM_OTHER_ODEX_MARKER)),Y)) \
+	  $(eval _is_kernel_modules_blocklist := $(if $(findstring $f,$(ALL_KERNEL_MODULES_BLOCKLIST)),Y)) \
+	  $(eval _is_fsverity_build_manifest_apk := $(if $(findstring $f,$(ALL_FSVERITY_BUILD_MANIFEST_APK)),Y)) \
+	  $(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \
+	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)) \
+	  @echo /$(_path_on_device)$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated) >> $@ $(newline) \
+	  $(if $(_post_installed_dexpreopt_zip), \
+	  for i in $$(zipinfo -1 $(_post_installed_dexpreopt_zip)); do echo /$$i$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated) >> $@ ; done $(newline) \
+	  ) \
+	)
+
+.PHONY: sbom
+ifeq ($(TARGET_BUILD_APPS),)
+sbom: $(PRODUCT_OUT)/sbom.spdx.json
+$(PRODUCT_OUT)/sbom.spdx.json: $(PRODUCT_OUT)/sbom.spdx
+$(PRODUCT_OUT)/sbom.spdx: $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
+	rm -rf $@
+	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --product_out_dir=$(PRODUCT_OUT) --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr=$(PRODUCT_MANUFACTURER) --json
+
+else
+apps_only_sbom_files := $(sort $(patsubst %,%.spdx,$(apps_only_installed_files)))
+$(apps_only_sbom_files): $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
+	rm -rf $@
+	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --product_out_dir=$(PRODUCT_OUT) --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr=$(PRODUCT_MANUFACTURER) --unbundled
+
+sbom: $(apps_only_sbom_files)
+endif
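+
+# The phony `sbom` goal above can be invoked directly (e.g. `m sbom`; the
+# build command shown is illustrative).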
+
 $(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
 
 $(info [$(call inc_and_print,subdir_makefiles_inc)/$(subdir_makefiles_total)] writing build rules ...)
diff --git a/core/os_licensing.mk b/core/os_licensing.mk
index db7c422..1e1b7df 100644
--- a/core/os_licensing.mk
+++ b/core/os_licensing.mk
@@ -21,8 +21,8 @@
 	$(copy-file-to-target)
 endif
 
-$(call declare-0p-target,$(target_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_notice_html_or_xml_gz))
+$(call declare-1p-target,$(target_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_notice_html_or_xml_gz))
 endif
 
 .PHONY: vendorlicense
@@ -43,8 +43,8 @@
 $(installed_vendor_notice_xml_gz): $(target_vendor_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_vendor_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_vendor_notice_xml_gz))
+$(call declare-1p-target,$(target_vendor_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_vendor_notice_xml_gz))
 endif
 
 .PHONY: odmlicense
@@ -62,8 +62,8 @@
 $(installed_odm_notice_xml_gz): $(target_odm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_odm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_odm_notice_xml_gz))
+$(call declare-1p-target,$(target_odm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_odm_notice_xml_gz))
 endif
 
 .PHONY: oemlicense
@@ -84,8 +84,8 @@
 $(installed_product_notice_xml_gz): $(target_product_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_product_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_product_notice_xml_gz))
+$(call declare-1p-target,$(target_product_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_product_notice_xml_gz))
 endif
 
 .PHONY: systemextlicense
@@ -103,8 +103,8 @@
 $(installed_system_ext_notice_xml_gz): $(target_system_ext_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_system_ext_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_system_ext_notice_xml_gz))
+$(call declare-1p-target,$(target_system_ext_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_system_ext_notice_xml_gz))
 endif
 
 .PHONY: vendor_dlkmlicense
@@ -122,8 +122,8 @@
 $(installed_vendor_dlkm_notice_xml_gz): $(target_vendor_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_vendor_dlkm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_vendor_dlkm_notice_xml_gz))
+$(call declare-1p-target,$(target_vendor_dlkm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_vendor_dlkm_notice_xml_gz))
 endif
 
 .PHONY: odm_dlkmlicense
@@ -141,8 +141,8 @@
 $(installed_odm_dlkm_notice_xml_gz): $(target_odm_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_odm_dlkm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_odm_dlkm_notice_xml_gz))
+$(call declare-1p-target,$(target_odm_dlkm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_odm_dlkm_notice_xml_gz))
 endif
 
 .PHONY: system_dlkmlicense
@@ -160,8 +160,8 @@
 $(installed_system_dlkm_notice_xml_gz): $(target_system_dlkm_notice_file_xml_gz)
 	$(copy-file-to-target)
 
-$(call declare-0p-target,$(target_system_dlkm_notice_file_xml_gz))
-$(call declare-0p-target,$(installed_sysetm_dlkm_notice_xml_gz))
+$(call declare-1p-target,$(target_system_dlkm_notice_file_xml_gz))
+$(call declare-1p-target,$(installed_sysetm_dlkm_notice_xml_gz))
 endif
 
 endif # not TARGET_BUILD_APPS
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index ef1471d..5bea9b6 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -57,6 +57,9 @@
   $(error $(LOCAL_MODULE) : unexpected LOCAL_MODULE_CLASS for prebuilts: $(LOCAL_MODULE_CLASS))
 endif
 
+$(if $(filter-out $(SOONG_ANDROID_MK),$(LOCAL_MODULE_MAKEFILE)), \
+  $(eval ALL_MODULES.$(my_register_name).IS_PREBUILT_MAKE_MODULE := Y))
+
 $(built_module) : $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
 my_prebuilt_src_file :=
diff --git a/core/product.mk b/core/product.mk
index 3b22314..f4d5a4f 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -24,6 +24,8 @@
 
 _product_single_value_vars += PRODUCT_NAME
 _product_single_value_vars += PRODUCT_MODEL
+_product_single_value_vars += PRODUCT_NAME_FOR_ATTESTATION
+_product_single_value_vars += PRODUCT_MODEL_FOR_ATTESTATION
 
 # The resoure configuration options to use for this product.
 _product_list_vars += PRODUCT_LOCALES
@@ -43,6 +45,7 @@
 _product_single_value_vars += PRODUCT_DEVICE
 _product_single_value_vars += PRODUCT_MANUFACTURER
 _product_single_value_vars += PRODUCT_BRAND
+_product_single_value_vars += PRODUCT_BRAND_FOR_ATTESTATION
 
 # These PRODUCT_SYSTEM_* flags, if defined, are used in place of the
 # corresponding PRODUCT_* flags for the sysprops on /system.
diff --git a/core/product_config.rbc b/core/product_config.rbc
index da8209b..97c1d00 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -462,6 +462,9 @@
 
 def __words(string_or_list):
     if type(string_or_list) == "list":
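+        # If the list contains any non-string element, return it unchanged
+        # instead of trying to join and re-split it.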
+        for x in string_or_list:
+            if type(x) != "string":
+                return string_or_list
         string_or_list = " ".join(string_or_list)
     return _mkstrip(string_or_list).split()
 
diff --git a/core/proguard.flags b/core/proguard.flags
index 53f63d8..d790061 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -9,14 +9,19 @@
 # Add this flag in your package's own configuration if it's needed.
 #-flattenpackagehierarchy
 
-# Keep classes and methods that have @VisibleForTesting annotations, except in
-# intermediate libraries that export those annotations (e.g., androidx, guava).
-# This avoids keeping library-specific test code that isn't actually needed
-# for platform testing.
+# Keep classes and members with the platform-defined @VisibleForTesting annotation.
+-keep @com.android.internal.annotations.VisibleForTesting class *
+-keepclassmembers class * {
+    @com.android.internal.annotations.VisibleForTesting *;
+}
+
+# Keep classes and members with non-platform @VisibleForTesting annotations, but
+# only within platform-defined packages. This avoids keeping external, library-specific
+# test code that isn't actually needed for platform testing.
 # TODO(b/239961360): Migrate away from androidx.annotation.VisibleForTesting
 # and com.google.common.annotations.VisibleForTesting use in platform code.
--keep @**.VisibleForTesting class !androidx.**,!com.google.common.**,*
--keepclassmembers class !androidx.**,!com.google.common.**,* {
+-keep @**.VisibleForTesting class android.**,com.android.**,com.google.android.**
+-keepclassmembers class android.**,com.android.**,com.google.android.** {
     @**.VisibleForTesting *;
 }
 
diff --git a/core/python_binary_host_mobly_test_config_template.xml b/core/python_binary_host_mobly_test_config_template.xml
new file mode 100644
index 0000000..a6576cd
--- /dev/null
+++ b/core/python_binary_host_mobly_test_config_template.xml
@@ -0,0 +1,25 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Copyright (C) 2023 The Android Open Source Project
+     Licensed under the Apache License, Version 2.0 (the "License");
+     you may not use this file except in compliance with the License.
+     You may obtain a copy of the License at
+          http://www.apache.org/licenses/LICENSE-2.0
+     Unless required by applicable law or agreed to in writing, software
+     distributed under the License is distributed on an "AS IS" BASIS,
+     WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+     See the License for the specific language governing permissions and
+     limitations under the License.
+-->
+<configuration description="Config for {MODULE} mobly test">
+    {EXTRA_CONFIGS}
+
+    <device name="device1"></device>
+    <device name="device2"></device>
+
+    <test class="com.android.tradefed.testtype.mobly.MoblyBinaryHostTest">
+      <!-- The mobly-par-file-name should match the module name -->
+      <option name="mobly-par-file-name" value="{MODULE}" />
+      <!-- Timeout limit in milliseconds for all test cases of the python binary -->
+      <option name="mobly-test-timeout" value="300000" />
+    </test>
+</configuration>
diff --git a/core/rbe.mk b/core/rbe.mk
index 65abde5..6754b0a 100644
--- a/core/rbe.mk
+++ b/core/rbe.mk
@@ -81,11 +81,11 @@
   endif
 
   ifdef RBE_R8
-    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/r8-compat-proguard.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=$(JAVA))
+    R8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=r8 --exec_strategy=$(r8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/host/linux-x86/framework/r8.jar,build/make/core/proguard_basic_keeps.flags --toolchain_inputs=$(firstword $(JAVA)))
   endif
 
   ifdef RBE_D8
-    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/soong/host/linux-x86/framework/d8.jar --toolchain_inputs=$(JAVA))
+    D8_WRAPPER := $(strip $(RBE_WRAPPER) --labels=type=compile,compiler=d8 --exec_strategy=$(d8_exec_strategy) --platform=$(java_r8_d8_platform) --inputs=$(OUT_DIR)/host/linux-x86/framework/d8.jar --toolchain_inputs=$(firstword $(JAVA)))
   endif
 
   rbe_dir :=
diff --git a/core/sbom.mk b/core/sbom.mk
new file mode 100644
index 0000000..e23bbc1
--- /dev/null
+++ b/core/sbom.mk
@@ -0,0 +1,11 @@
+# For SBOM generation
+# This is included by base_rules.mk and does not need to be included in other .mk files
+# unless a .mk file changes its installed file after including base_rules.mk.
+
+ifdef my_register_name
+  ifneq (, $(strip $(ALL_MODULES.$(my_register_name).INSTALLED)))
+    $(foreach installed_file,$(ALL_MODULES.$(my_register_name).INSTALLED),\
+      $(eval ALL_INSTALLED_FILES.$(installed_file) := $(my_register_name))\
+    )
+  endif
+endif
\ No newline at end of file
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index 786a755..dd550b5 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -162,19 +162,21 @@
 # embedded JNI will already have been handled by soong
 my_embed_jni :=
 my_prebuilt_jni_libs :=
-ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
-  my_2nd_arch_prefix :=
-  LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH))
-  partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_ARCH))
-  include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
-endif
-ifdef TARGET_2ND_ARCH
-  ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH)
-    my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
-    LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH))
-    partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_2ND_ARCH))
+ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
+  ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH)
+    my_2nd_arch_prefix :=
+    LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_ARCH))
+    partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_ARCH))
     include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
   endif
+  ifdef TARGET_2ND_ARCH
+    ifdef LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH)
+      my_2nd_arch_prefix := $(TARGET_2ND_ARCH_VAR_PREFIX)
+      LOCAL_JNI_SHARED_LIBRARIES := $(LOCAL_SOONG_JNI_LIBS_$(TARGET_2ND_ARCH))
+      partition_lib_pairs :=  $(LOCAL_SOONG_JNI_LIBS_PARTITION_$(TARGET_2ND_ARCH))
+      include $(BUILD_SYSTEM)/install_jni_libs_internal.mk
+    endif
+  endif
 endif
 LOCAL_SHARED_JNI_LIBRARIES :=
 my_embed_jni :=
@@ -267,3 +269,8 @@
 endif
 
 SOONG_ALREADY_CONV += $(LOCAL_MODULE)
+
+###########################################################
+## SBOM generation
+###########################################################
+include $(BUILD_SBOM_GEN)
\ No newline at end of file
diff --git a/core/soong_config.mk b/core/soong_config.mk
index e6c4d64..0101796 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -266,6 +266,10 @@
 
 $(call add_json_bool, CompressedApex, $(filter true,$(PRODUCT_COMPRESSED_APEX)))
 
+ifndef APEX_BUILD_FOR_PRE_S_DEVICES
+$(call add_json_bool, TrimmedApex, $(filter true,$(PRODUCT_TRIMMED_APEX)))
+endif
+
 $(call add_json_bool, BoardUsesRecoveryAsBoot, $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
 
 $(call add_json_list, BoardKernelBinaries, $(BOARD_KERNEL_BINARIES))
@@ -282,6 +286,7 @@
 $(call add_json_bool, BuildBrokenDepfile,                 $(filter true,$(BUILD_BROKEN_DEPFILE)))
 $(call add_json_bool, BuildBrokenEnforceSyspropOwner,     $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
 $(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
+$(call add_json_bool, BuildBrokenUsesSoongPython2Modules, $(filter true,$(BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES)))
 $(call add_json_bool, BuildBrokenVendorPropertyNamespace, $(filter true,$(BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE)))
 $(call add_json_list, BuildBrokenInputDirModules, $(BUILD_BROKEN_INPUT_DIR_MODULES))
 
diff --git a/core/sysprop.mk b/core/sysprop.mk
index b51818a..bd6f3d9 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -46,6 +46,10 @@
         echo "ro.product.$(1).manufacturer=$(PRODUCT_MANUFACTURER)" >> $(2);\
         echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
         echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
+        # Attestation-specific properties for AOSP/GSI builds running on device.
+        echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\
+        echo "ro.product.brand_for_attestation=$(PRODUCT_BRAND_FOR_ATTESTATION)" >> $(2);\
+        echo "ro.product.name_for_attestation=$(PRODUCT_NAME_FOR_ATTESTATION)" >> $(2);\
     )\
     $(if $(filter true,$(ZYGOTE_FORCE_64)),\
         $(if $(filter vendor,$(1)),\
@@ -137,7 +141,7 @@
 	    fi;)
 	$(hide) echo "# end of file" >> $$@
 
-$(call declare-0p-target,$(2))
+$(call declare-1p-target,$(2))
 endef
 
 # -----------------------------------------------------------------
@@ -539,3 +543,19 @@
     $(empty)))
 
 $(eval $(call declare-1p-target,$(INSTALLED_RAMDISK_BUILD_PROP_TARGET)))
+
+ALL_INSTALLED_BUILD_PROP_FILES := \
+  $(INSTALLED_BUILD_PROP_TARGET) \
+  $(INSTALLED_VENDOR_BUILD_PROP_TARGET) \
+  $(INSTALLED_PRODUCT_BUILD_PROP_TARGET) \
+  $(INSTALLED_ODM_BUILD_PROP_TARGET) \
+  $(INSTALLED_VENDOR_DLKM_BUILD_PROP_TARGET) \
+  $(INSTALLED_ODM_DLKM_BUILD_PROP_TARGET) \
+  $(INSTALLED_SYSTEM_DLKM_BUILD_PROP_TARGET) \
+  $(INSTALLED_SYSTEM_EXT_BUILD_PROP_TARGET) \
+  $(INSTALLED_RAMDISK_BUILD_PROP_TARGET)
+
+# $1 installed file path, e.g. out/target/product/vsoc_x86_64/system/build.prop
+define is-build-prop
+$(if $(findstring $1,$(ALL_INSTALLED_BUILD_PROP_FILES)),Y)
+endef
\ No newline at end of file
diff --git a/core/tasks/OWNERS b/core/tasks/OWNERS
deleted file mode 100644
index 372ff8b..0000000
--- a/core/tasks/OWNERS
+++ /dev/null
@@ -1,2 +0,0 @@
-per-file art-host-tests.mk = dshi@google.com,dsrbecky@google.com,jdesprez@google.com,rpl@google.com
-per-file catbox.mk = smara@google.com,schinchalkar@google.com,kaneesh@google.com
diff --git a/core/tasks/art-host-tests.mk b/core/tasks/art-host-tests.mk
index 2af1ded..ff9eb09 100644
--- a/core/tasks/art-host-tests.mk
+++ b/core/tasks/art-host-tests.mk
@@ -24,25 +24,55 @@
     $(eval _cmf_src := $(word 1,$(_cmf_tuple))) \
     $(_cmf_src)))
 
-$(art_host_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_art_host_tests)
+# Create an artifact to include a list of test config files in art-host-tests.
+art_host_tests_list_zip := $(PRODUCT_OUT)/art-host-tests_list.zip
+# Create an artifact to include all test config files in art-host-tests.
+art_host_tests_configs_zip := $(PRODUCT_OUT)/art-host-tests_configs.zip
+# Create an artifact to include all shared library files in art-host-tests.
+art_host_tests_host_shared_libs_zip := $(PRODUCT_OUT)/art-host-tests_host-shared-libs.zip
 
+$(art_host_tests_zip) : PRIVATE_HOST_SHARED_LIBS := $(my_host_shared_lib_for_art_host_tests)
+$(art_host_tests_zip) : PRIVATE_art_host_tests_list_zip := $(art_host_tests_list_zip)
+$(art_host_tests_zip) : PRIVATE_art_host_tests_configs_zip := $(art_host_tests_configs_zip)
+$(art_host_tests_zip) : PRIVATE_art_host_tests_host_shared_libs_zip := $(art_host_tests_host_shared_libs_zip)
+$(art_host_tests_zip) : .KATI_IMPLICIT_OUTPUTS := $(art_host_tests_list_zip) $(art_host_tests_configs_zip) $(art_host_tests_host_shared_libs_zip)
+$(art_host_tests_zip) : PRIVATE_INTERMEDIATES_DIR := $(intermediates_dir)
 $(art_host_tests_zip) : $(COMPATIBILITY.art-host-tests.FILES) $(my_host_shared_lib_for_art_host_tests) $(SOONG_ZIP)
-	echo $(sort $(COMPATIBILITY.art-host-tests.FILES)) | tr " " "\n" > $@.list
-	grep $(HOST_OUT_TESTCASES) $@.list > $@-host.list || true
-	$(hide) touch $@-host-libs.list
+	rm -rf $(PRIVATE_INTERMEDIATES_DIR)
+	rm -f $@ $(PRIVATE_art_host_tests_list_zip)
+	mkdir -p $(PRIVATE_INTERMEDIATES_DIR)
+	echo $(sort $(COMPATIBILITY.art-host-tests.FILES)) | tr " " "\n" > $(PRIVATE_INTERMEDIATES_DIR)/list
+	grep $(HOST_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/host.list || true
+	$(hide) touch $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list
 	$(hide) for shared_lib in $(PRIVATE_HOST_SHARED_LIBS); do \
-	  echo $$shared_lib >> $@-host-libs.list; \
+	  echo $$shared_lib >> $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list; \
 	done
-	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
-	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list \
-	  -P target -C $(PRODUCT_OUT) -l $@-target.list \
-	  -P host/testcases -C $(HOST_OUT) -l $@-host-libs.list
-	rm -f $@.list $@-host.list $@-target.list $@-host-libs.list
+	grep $(TARGET_OUT_TESTCASES) $(PRIVATE_INTERMEDIATES_DIR)/list > $(PRIVATE_INTERMEDIATES_DIR)/target.list || true
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \
+	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \
+	  -P host/testcases -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list > $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list || true
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list > $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list || true
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_configs_zip) \
+	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \
+	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list
+	grep $(HOST_OUT) $(PRIVATE_INTERMEDIATES_DIR)/shared-libs.list > $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list || true
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_host_shared_libs_zip) \
+	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-shared-libs.list
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/host.list | sed s%$(HOST_OUT)%host%g > $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
+	grep -e .*\\.config$$ $(PRIVATE_INTERMEDIATES_DIR)/target.list | sed s%$(PRODUCT_OUT)%target%g >> $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
+	$(hide) $(SOONG_ZIP) -d -o $(PRIVATE_art_host_tests_list_zip) -C $(PRIVATE_INTERMEDIATES_DIR) -f $(PRIVATE_INTERMEDIATES_DIR)/art-host-tests_list
 
 art-host-tests: $(art_host_tests_zip)
-$(call dist-for-goals, art-host-tests, $(art_host_tests_zip))
+$(call dist-for-goals, art-host-tests, $(art_host_tests_zip) $(art_host_tests_list_zip) $(art_host_tests_configs_zip) $(art_host_tests_host_shared_libs_zip))
 
 $(call declare-1p-container,$(art_host_tests_zip),)
 $(call declare-container-license-deps,$(art_host_tests_zip),$(COMPATIBILITY.art-host-tests.FILES) $(my_host_shared_lib_for_art_host_tests),$(PRODUCT_OUT)/:/)
 
 tests: art-host-tests
+
+intermediates_dir :=
+art_host_tests_zip :=
+art_host_tests_list_zip :=
+art_host_tests_configs_zip :=
+art_host_tests_host_shared_libs_zip :=
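The new list zip is produced by filtering the packaged file list down to test configs and re-rooting their absolute prefixes to host/ and target/, matching the layout inside art-host-tests.zip. A standalone sketch of that filtering step, using made-up file names and stand-ins for HOST_OUT and PRODUCT_OUT, is:

    # Sketch only; the paths and module names below are illustrative.
    HOST_OUT=out/host/linux-x86
    PRODUCT_OUT=out/target/product/vsoc_x86_64
    printf '%s\n' \
      "$HOST_OUT/testcases/SomeHostTest/SomeHostTest.config" \
      "$PRODUCT_OUT/testcases/SomeDeviceTest/SomeDeviceTest.apk" > list
    grep "$HOST_OUT" list > host.list || true
    grep "$PRODUCT_OUT" list > target.list || true
    # Keep only .config entries and rewrite the prefixes the way the zip lays them out.
    grep -e '.*\.config$' host.list   | sed "s%$HOST_OUT%host%g"      > art-host-tests_list
    grep -e '.*\.config$' target.list | sed "s%$PRODUCT_OUT%target%g" >> art-host-tests_list
    cat art-host-tests_list   # -> host/testcases/SomeHostTest/SomeHostTest.config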
diff --git a/core/tasks/device-tests.mk b/core/tasks/device-tests.mk
index 3196f52..4167a7e 100644
--- a/core/tasks/device-tests.mk
+++ b/core/tasks/device-tests.mk
@@ -39,7 +39,7 @@
 	grep $(HOST_OUT_TESTCASES) $@-shared-libs.list > $@-host-shared-libs.list || true
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
 	grep -e .*\\.config$$ $@-target.list > $@-target-test-configs.list || true
-	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list
+	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list -P target -C $(PRODUCT_OUT) -l $@-target.list -sha256
 	$(hide) $(SOONG_ZIP) -d -o $(device-tests-configs-zip) \
 	  -P host -C $(HOST_OUT) -l $@-host-test-configs.list \
 	  -P target -C $(PRODUCT_OUT) -l $@-target-test-configs.list
diff --git a/core/tasks/general-tests.mk b/core/tasks/general-tests.mk
index 5726ee2..8dbc76f 100644
--- a/core/tasks/general-tests.mk
+++ b/core/tasks/general-tests.mk
@@ -87,7 +87,8 @@
 	$(SOONG_ZIP) -d -o $@ \
 	  -P host -C $(PRIVATE_INTERMEDIATES_DIR) -D $(PRIVATE_INTERMEDIATES_DIR)/tools \
 	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host.list \
-	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list
+	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target.list \
+	  -sha256
 	$(SOONG_ZIP) -d -o $(PRIVATE_general_tests_configs_zip) \
 	  -P host -C $(HOST_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/host-test-configs.list \
 	  -P target -C $(PRODUCT_OUT) -l $(PRIVATE_INTERMEDIATES_DIR)/target-test-configs.list
diff --git a/core/tasks/host-unit-tests.mk b/core/tasks/host-unit-tests.mk
index ed2f2a6..733a2e2 100644
--- a/core/tasks/host-unit-tests.mk
+++ b/core/tasks/host-unit-tests.mk
@@ -41,7 +41,7 @@
 	grep $(TARGET_OUT_TESTCASES) $@.list > $@-target.list || true
 	$(hide) $(SOONG_ZIP) -d -o $@ -P host -C $(HOST_OUT) -l $@-host.list \
 	  -P target -C $(PRODUCT_OUT) -l $@-target.list \
-	  -P host/testcases -C $(HOST_OUT) -l $@-host-libs.list
+	  -P host/testcases -C $(HOST_OUT) -l $@-host-libs.list -sha256
 	rm -f $@.list $@-host.list $@-target.list $@-host-libs.list
 
 host-unit-tests: $(host_unit_tests_zip)
diff --git a/core/tasks/owners.mk b/core/tasks/owners.mk
index 806b8ee..29f3c44 100644
--- a/core/tasks/owners.mk
+++ b/core/tasks/owners.mk
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# Create an artifact to include TEST_MAPPING files in source tree.
+# Create an artifact to include OWNERS files in source tree.
 
 .PHONY: owners
 
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index a5f162a..9400890 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -51,7 +51,7 @@
 $(test_suite_jdk): PRIVATE_SUBDIR := $(test_suite_subdir)
 $(test_suite_jdk): $(shell find $(test_suite_jdk_dir) -type f | sort)
 $(test_suite_jdk): $(SOONG_ZIP)
-	$(SOONG_ZIP) -o $@ -P $(PRIVATE_SUBDIR)/jdk -C $(PRIVATE_JDK_DIR) -D $(PRIVATE_JDK_DIR)
+	$(SOONG_ZIP) -o $@ -P $(PRIVATE_SUBDIR)/jdk -C $(PRIVATE_JDK_DIR) -D $(PRIVATE_JDK_DIR) -sha256
 
 $(call declare-license-metadata,$(test_suite_jdk),SPDX-license-identifier-GPL-2.0-with-classpath-exception,permissive,\
   $(test_suite_jdk_dir)/legal/java.base/LICENSE,JDK,prebuilts/jdk/$(notdir $(patsubst %/,%,$(dir $(test_suite_jdk_dir)))))
@@ -123,7 +123,7 @@
 	cp $(PRIVATE_TOOLS) $(PRIVATE_OUT_DIR)/tools
 	$(if $(PRIVATE_DYNAMIC_CONFIG),$(hide) cp $(PRIVATE_DYNAMIC_CONFIG) $(PRIVATE_OUT_DIR)/testcases/$(PRIVATE_SUITE_NAME).dynamic)
 	find $(PRIVATE_RESOURCES) | sort >$@.list
-	$(SOONG_ZIP) -d -o $@.tmp -C $(dir $@) -l $@.list
+	$(SOONG_ZIP) -d -o $@.tmp -C $(dir $@) -l $@.list -sha256
 	$(MERGE_ZIPS) $@ $@.tmp $(PRIVATE_JDK)
 	rm -f $@.tmp
 # Build a list of tests
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index e6a96ff..3533851 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -103,7 +103,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-    PLATFORM_SECURITY_PATCH := 2023-01-05
+    PLATFORM_SECURITY_PATCH := 2023-03-05
 endif
 
 include $(BUILD_SYSTEM)/version_util.mk
diff --git a/envsetup.sh b/envsetup.sh
index ab86d5f..905635c 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -297,7 +297,22 @@
     if [ -n $ANDROID_PYTHONPATH ]; then
         export PYTHONPATH=${PYTHONPATH//$ANDROID_PYTHONPATH/}
     fi
-    export ANDROID_PYTHONPATH=$T/development/python-packages:
+    # //development/python-packages contains both a pseudo-PYTHONPATH, which
+    # mimics an already assembled venv, and real Python packages that are not
+    # in that layout until they are installed. We can fake it for the latter
+    # type by adding the package source directories to the PYTHONPATH directly.
+    # For the former group, we only need to add the python-packages directory
+    # itself.
+    #
+    # This could be cleaned up by converting the remaining packages that are in
+    # the first category into a typical python source layout (that is, another
+    # layer of directory nesting) and automatically adding all subdirectories of
+    # python-packages to the PYTHONPATH instead of manually curating this. We
+    # can't convert the packages like adb to the other style because doing so
+    # would prevent exporting type info from those packages.
+    #
+    # http://b/266688086
+    export ANDROID_PYTHONPATH=$T/development/python-packages/adb:$T/development/python-packages:
     if [ -n $VENDOR_PYTHONPATH ]; then
         ANDROID_PYTHONPATH=$ANDROID_PYTHONPATH$VENDOR_PYTHONPATH
     fi
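The two layouts the comment distinguishes can be pictured with a throwaway tree; the package names below are made up for the sketch and are not an inventory of //development/python-packages:

    # Build a temporary tree that mirrors the two layouts, then import from it.
    root=$(mktemp -d)
    mkdir -p "$root/python-packages/fakepkg_a"             # venv-like: package dir directly under python-packages
    touch "$root/python-packages/fakepkg_a/__init__.py"
    mkdir -p "$root/python-packages/fakepkg_b/fakepkg_b"   # source layout: one extra nesting level (the adb-style case above)
    touch "$root/python-packages/fakepkg_b/fakepkg_b/__init__.py"
    # The flat case needs python-packages itself on PYTHONPATH; the nested case needs its own dir.
    PYTHONPATH="$root/python-packages/fakepkg_b:$root/python-packages" \
        python3 -c 'import fakepkg_a, fakepkg_b; print("both layouts resolve")'
    rm -rf "$root"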
@@ -1195,7 +1210,7 @@
     Darwin)
         function sgrep()
         {
-            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|kt|xml|sh|mk|aidl|vts|proto)' \
+            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|hpp|S|java|kt|xml|sh|mk|aidl|vts|proto|rs|go)' \
                 -exec grep --color -n "$@" {} +
         }
 
@@ -1203,7 +1218,7 @@
     *)
         function sgrep()
         {
-            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|kt\|xml\|sh\|mk\|aidl\|vts\|proto\)' \
+            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|hpp\|S\|java\|kt\|xml\|sh\|mk\|aidl\|vts\|proto\|rs\|go\)' \
                 -exec grep --color -n "$@" {} +
         }
         ;;
@@ -1866,11 +1881,6 @@
         color_reset=""
     fi
 
-    if [[ "x${USE_RBE}" == "x" && $mins -gt 15 && "${ANDROID_BUILD_ENVIRONMENT_CONFIG}" == "googler" ]]; then
-        echo
-        echo "${color_warning}Start using RBE (http://go/build-fast) to get faster builds!${color_reset}"
-    fi
-
     echo
     if [ $ret -eq 0 ] ; then
         echo -n "${color_success}#### build completed successfully "
diff --git a/finalize-aidl-vndk-sdk-resources.sh b/finalize-aidl-vndk-sdk-resources.sh
deleted file mode 100755
index f03fb43..0000000
--- a/finalize-aidl-vndk-sdk-resources.sh
+++ /dev/null
@@ -1,81 +0,0 @@
-#!/bin/bash
-
-set -ex
-
-function finalize_aidl_vndk_sdk_resources() {
-    local PLATFORM_CODENAME='UpsideDownCake'
-    local PLATFORM_CODENAME_JAVA='UPSIDE_DOWN_CAKE'
-    local PLATFORM_SDK_VERSION='34'
-    local PLATFORM_VERSION='14'
-
-    local SDK_CODENAME="public static final int $PLATFORM_CODENAME_JAVA = CUR_DEVELOPMENT;"
-    local SDK_VERSION="public static final int $PLATFORM_CODENAME_JAVA = $PLATFORM_SDK_VERSION;"
-
-    local top="$(dirname "$0")"/../..
-
-    # default target to modify tree and build SDK
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    # This script is WIP and only finalizes part of the Android branch for release.
-    # The full process can be found at (INTERNAL) go/android-sdk-finalization.
-
-    # Update references in the codebase to new API version (TODO)
-    # ...
-
-    # VNDK definitions for new SDK version
-    cp "$top/development/vndk/tools/definition-tool/datasets/vndk-lib-extra-list-current.txt" \
-       "$top/development/vndk/tools/definition-tool/datasets/vndk-lib-extra-list-$PLATFORM_SDK_VERSION.txt"
-
-    AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api create_reference_dumps
-
-    # Generate ABI dumps
-    ANDROID_BUILD_TOP="$top" \
-        out/host/linux-x86/bin/create_reference_dumps \
-        -p aosp_arm64 --build-variant user
-
-    echo "NOTE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF (until 'DONE')"
-    # Update new versions of files. See update-vndk-list.sh (which requires envsetup.sh)
-    $m check-vndk-list || \
-        { cp $top/out/soong/vndk/vndk.libraries.txt $top/build/make/target/product/gsi/current.txt; }
-    echo "DONE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF"
-
-    # Finalize SDK
-
-    # build/make
-    local version_defaults="$top/build/make/core/version_defaults.mk"
-    sed -i -e "s/PLATFORM_SDK_VERSION := .*/PLATFORM_SDK_VERSION := ${PLATFORM_SDK_VERSION}/g" $version_defaults
-    sed -i -e "s/PLATFORM_VERSION_LAST_STABLE := .*/PLATFORM_VERSION_LAST_STABLE := ${PLATFORM_VERSION}/g" $version_defaults
-    sed -i -e "s/sepolicy_major_vers := .*/sepolicy_major_vers := ${PLATFORM_SDK_VERSION}/g" "$top/build/make/core/config.mk"
-    cp "$top/build/make/target/product/gsi/current.txt" "$top/build/make/target/product/gsi/$PLATFORM_SDK_VERSION.txt"
-
-    # build/soong
-    sed -i -e "/:.*$((${PLATFORM_SDK_VERSION}-1)),/a \\\t\t\t\"${PLATFORM_CODENAME}\":     ${PLATFORM_SDK_VERSION}," "$top/build/soong/android/api_levels.go"
-
-    # cts
-    echo ${PLATFORM_VERSION} > "$top/cts/tests/tests/os/assets/platform_releases.txt"
-    sed -i -e "s/EXPECTED_SDK = $((${PLATFORM_SDK_VERSION}-1))/EXPECTED_SDK = ${PLATFORM_SDK_VERSION}/g" "$top/cts/tests/tests/os/src/android/os/cts/BuildVersionTest.java"
-
-    # libcore
-    sed -i "s%$SDK_CODENAME%$SDK_VERSION%g" "$top/libcore/dalvik/src/main/java/dalvik/annotation/compat/VersionCodes.java"
-
-    # platform_testing
-    local version_codes="$top/platform_testing/libraries/compatibility-common-util/src/com/android/compatibility/common/util/VersionCodes.java"
-    sed -i -e "/=.*$((${PLATFORM_SDK_VERSION}-1));/a \\    ${SDK_VERSION}" $version_codes
-
-    # Finalize resources
-    "$top/frameworks/base/tools/aapt2/tools/finalize_res.py" \
-           "$top/frameworks/base/core/res/res/values/public-staging.xml" \
-           "$top/frameworks/base/core/res/res/values/public-final.xml"
-
-    # frameworks/base
-    sed -i "s%$SDK_CODENAME%$SDK_VERSION%g" "$top/frameworks/base/core/java/android/os/Build.java"
-    sed -i -e "/=.*$((${PLATFORM_SDK_VERSION}-1)),/a \\    SDK_${PLATFORM_CODENAME_JAVA} = ${PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt/SdkConstants.h"
-    sed -i -e "/=.*$((${PLATFORM_SDK_VERSION}-1)),/a \\  SDK_${PLATFORM_CODENAME_JAVA} = ${PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt2/SdkConstants.h"
-
-    # Force update current.txt
-    $m clobber
-    $m update-api
-}
-
-finalize_aidl_vndk_sdk_resources
-
diff --git a/finalize-sdk-rel.sh b/finalize-sdk-rel.sh
deleted file mode 100755
index 35899f7..0000000
--- a/finalize-sdk-rel.sh
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/bin/bash
-
-set -ex
-
-function finalize_sdk_rel() {
-    local DEV_SRC_DIR="$(dirname "$0")"/../..
-    local BUILD_PREFIX='UP1A'
-    local PLATFORM_CODENAME='UpsideDownCake'
-    local PLATFORM_VERSION='14'
-    local PLATFORM_SDK_VERSION='34'
-
-    # default target to modify tree and build SDK
-    local m="$DEV_SRC_DIR/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    # adb keys
-    $m adb
-    LOGNAME=android-eng HOSTNAME=google.com adb keygen "$DEV_SRC_DIR/vendor/google/security/adb/${PLATFORM_VERSION}.adb_key"
-
-    # build/make/core/version_defaults.mk
-    sed -i -e "s/PLATFORM_VERSION_CODENAME.${BUILD_PREFIX} := .*/PLATFORM_VERSION_CODENAME.${BUILD_PREFIX} := REL/g" "$DEV_SRC_DIR/build/make/core/version_defaults.mk"
-
-    # cts
-    echo "$PLATFORM_VERSION" > "$DEV_SRC_DIR/cts/tests/tests/os/assets/platform_versions.txt"
-    git -C "$DEV_SRC_DIR/cts" mv hostsidetests/theme/assets/${PLATFORM_CODENAME} hostsidetests/theme/assets/${PLATFORM_SDK_VERSION}
-
-    # system/sepolicy
-    mkdir -p "$DEV_SRC_DIR/system/sepolicy/prebuilts/api/${PLATFORM_SDK_VERSION}.0/"
-    cp -r "$DEV_SRC_DIR/system/sepolicy/public/" "$DEV_SRC_DIR/system/sepolicy/prebuilts/api/${PLATFORM_SDK_VERSION}.0/"
-    cp -r "$DEV_SRC_DIR/system/sepolicy/private/" "$DEV_SRC_DIR/system/sepolicy/prebuilts/api/${PLATFORM_SDK_VERSION}.0/"
-
-    # prebuilts/abi-dumps/ndk
-    git -C "$DEV_SRC_DIR/prebuilts/abi-dumps/ndk" mv current ${PLATFORM_SDK_VERSION}
-
-    # prebuilts/abi-dumps/vndk
-    git -C "$DEV_SRC_DIR/prebuilts/abi-dumps/vndk" mv ${PLATFORM_CODENAME} ${PLATFORM_SDK_VERSION}
-
-    # prebuilts/abi-dumps/platform
-    git -C "$DEV_SRC_DIR/prebuilts/abi-dumps/platform" mv current ${PLATFORM_SDK_VERSION}
-}
-
-finalize_sdk_rel
-
diff --git a/finalize-step-1-for-build-target.sh b/finalize-step-1-for-build-target.sh
deleted file mode 100755
index 52cf0a7..0000000
--- a/finalize-step-1-for-build-target.sh
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/bin/bash
-# Continuous Integration script for *-finalization-1 branches.
-# Reverts previous finalization script commits and runs local build.
-
-set -ex
-
-function revert_to_unfinalized_state() {
-    declare -a projects=(
-        "build/make/"
-        "build/soong/"
-        "cts/"
-        "frameworks/base/"
-        "frameworks/hardware/interfaces/"
-        "frameworks/libs/modules-utils/"
-        "frameworks/libs/net/"
-        "hardware/interfaces/"
-        "libcore/"
-        "packages/services/Car/"
-        "platform_testing/"
-        "prebuilts/abi-dumps/ndk/"
-        "prebuilts/abi-dumps/platform/"
-        "prebuilts/abi-dumps/vndk/"
-        "system/hardware/interfaces/"
-        "system/tools/aidl/"
-        "tools/platform-compat"
-        "device/generic/car"
-        "development"
-    )
-
-    for project in "${projects[@]}"
-    do
-        local git_path="$top/$project"
-        echo "Reverting: $git_path"
-        baselineHash="$(git -C $git_path log --format=%H --no-merges --max-count=1 --grep ^FINALIZATION_STEP_1_BASELINE_COMMIT)" ;
-        if [[ $baselineHash ]]; then
-          previousHash="$(git -C $git_path log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_1_SCRIPT_COMMIT $baselineHash..HEAD | tr \n \040)" ;
-        else
-          previousHash="$(git -C $git_path log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_1_SCRIPT_COMMIT | tr \n \040)" ;
-        fi ;
-        if [[ $previousHash ]]; then git -C $git_path revert --no-commit --strategy=ort --strategy-option=ours $previousHash ; fi ;
-    done
-}
-
-function finalize_step_1_main() {
-    local top="$(dirname "$0")"/../..
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    revert_to_unfinalized_state
-
-    # vndk etc finalization
-    source $top/build/make/finalize-aidl-vndk-sdk-resources.sh
-
-    # build to confirm everything is OK
-    AIDL_FROZEN_REL=true $m
-}
-
-finalize_step_1_main
diff --git a/finalize-step-1.sh b/finalize-step-1.sh
deleted file mode 100755
index 20bc2bf..0000000
--- a/finalize-step-1.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/bin/bash
-# Automation for finalize_branch_for_release.sh.
-# Sets up local environment, runs the finalization script and submits the results.
-# WIP:
-# - does not submit, only sends to gerrit.
-
-# set -ex
-
-function revert_to_unfinalized_state() {
-    repo forall -c '\
-        git checkout . ; git revert --abort ; git clean -fdx ;\
-        git checkout @ ; git branch fina-step1 -D ; git reset --hard; \
-        repo start fina-step1 ; git checkout @ ; git b fina-step1 -D ;\
-        baselineHash="$(git log --format=%H --no-merges --max-count=1 --grep ^FINALIZATION_STEP_1_BASELINE_COMMIT)" ;\
-        if [[ $baselineHash ]]; then
-          previousHash="$(git log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_1_SCRIPT_COMMIT $baselineHash..HEAD | tr \n \040)" ;\
-        else
-          previousHash="$(git log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_1_SCRIPT_COMMIT | tr \n \040)" ;\
-        fi ; \
-        if [[ $previousHash ]]; then git revert --no-commit --strategy=ort --strategy-option=ours $previousHash ; fi ;'
-}
-
-function commit_changes() {
-    repo forall -c '\
-        if [[ $(git status --short) ]]; then
-            repo start fina-step1 ;
-            git add -A . ;
-            git commit -m FINALIZATION_STEP_1_SCRIPT_COMMIT -m WILL_BE_AUTOMATICALLY_REVERTED ;
-            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
-            git clean -fdx ; git reset --hard ;
-        fi'
-}
-
-function finalize_step_1_main() {
-    local top="$(dirname "$0")"/../..
-
-    repo selfupdate
-
-    revert_to_unfinalized_state
-
-    # vndk etc finalization
-    source $top/build/make/finalize-aidl-vndk-sdk-resources.sh
-
-    # move all changes to fina-step1 branch and commit with a robot message
-    commit_changes
-}
-
-finalize_step_1_main
diff --git a/finalize-step-2-for-build-target.sh b/finalize-step-2-for-build-target.sh
deleted file mode 100755
index caf415f..0000000
--- a/finalize-step-2-for-build-target.sh
+++ /dev/null
@@ -1,60 +0,0 @@
-#!/bin/bash
-# Continuous Integration script for *-finalization-2 branches.
-# Reverts previous finalization script commits and runs local build.
-
-set -ex
-
-function revert_to_unfinalized_state() {
-    declare -a projects=(
-        "build/make/"
-        "build/soong/"
-        "cts/"
-        "frameworks/base/"
-        "frameworks/hardware/interfaces/"
-        "frameworks/libs/modules-utils/"
-        "frameworks/libs/net/"
-        "hardware/interfaces/"
-        "libcore/"
-        "packages/services/Car/"
-        "platform_testing/"
-        "prebuilts/abi-dumps/ndk/"
-        "prebuilts/abi-dumps/platform/"
-        "prebuilts/abi-dumps/vndk/"
-        "system/hardware/interfaces/"
-        "system/tools/aidl/"
-        "tools/platform-compat"
-        "device/generic/car"
-        "development"
-    )
-
-    for project in "${projects[@]}"
-    do
-        local git_path="$top/$project"
-        echo "Reverting: $git_path"
-        baselineHash="$(git -C $git_path log --format=%H --no-merges --max-count=1 --grep ^FINALIZATION_STEP_1_BASELINE_COMMIT)" ;
-        if [[ $baselineHash ]]; then
-          previousHash="$(git -C $git_path log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_1_SCRIPT_COMMIT $baselineHash..HEAD | tr \n \040)" ;
-        else
-          previousHash="$(git -C $git_path log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_1_SCRIPT_COMMIT | tr \n \040)" ;
-        fi ;
-        if [[ $previousHash ]]; then git -C $git_path revert --no-commit --strategy=ort --strategy-option=ours $previousHash ; fi ;
-    done
-}
-
-function finalize_step_2_main() {
-    local top="$(dirname "$0")"/../..
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    revert_to_unfinalized_state
-
-    # vndk etc finalization
-    source $top/build/make/finalize-aidl-vndk-sdk-resources.sh
-
-    # prebuilts etc
-    source $top/build/make/finalize-sdk-rel.sh
-
-    # build to confirm everything is OK
-    AIDL_FROZEN_REL=true $m
-}
-
-finalize_step_2_main
diff --git a/finalize-step-2.sh b/finalize-step-2.sh
deleted file mode 100755
index 627153b..0000000
--- a/finalize-step-2.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/bin/bash
-# Automation for finalize_branch_for_release.sh.
-# Sets up local environment, runs the finalization script and submits the results.
-# WIP:
-# - does not submit, only sends to gerrit.
-
-# set -ex
-
-function revert_to_unfinalized_state() {
-    repo forall -c '\
-        git checkout . ; git revert --abort ; git clean -fdx ;\
-        git checkout @ ; git branch fina-step2 -D ; git reset --hard; \
-        repo start fina-step2 ; git checkout @ ; git b fina-step2 -D ;\
-        baselineHash="$(git log --format=%H --no-merges --max-count=1 --grep ^FINALIZATION_STEP_2_BASELINE_COMMIT)" ;\
-        if [[ $baselineHash ]]; then
-          previousHash="$(git log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_2_SCRIPT_COMMIT $baselineHash..HEAD | tr \n \040)" ;\
-        else
-          previousHash="$(git log --format=%H --no-merges --max-count=100 --grep ^FINALIZATION_STEP_2_SCRIPT_COMMIT | tr \n \040)" ;\
-        fi ; \
-        if [[ $previousHash ]]; then git revert --no-commit --strategy=ort --strategy-option=ours $previousHash ; fi ;'
-}
-
-function commit_changes() {
-    repo forall -c '\
-        if [[ $(git status --short) ]]; then
-            repo start fina-step1 ;
-            git add -A . ;
-            git commit -m FINALIZATION_STEP_2_SCRIPT_COMMIT -m WILL_BE_AUTOMATICALLY_REVERTED ;
-            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
-            git clean -fdx ; git reset --hard ;
-        fi'
-}
-
-function finalize_step_2_main() {
-    local top="$(dirname "$0")"/../..
-
-    repo selfupdate
-
-    revert_to_unfinalized_state
-
-    # vndk etc finalization
-    source $top/build/make/finalize-aidl-vndk-sdk-resources.sh
-
-    # move all changes to fina-step1 branch and commit with a robot message
-    commit_changes
-}
-
-finalize_step_2_main
diff --git a/finalize_branch_for_release.sh b/finalize_branch_for_release.sh
deleted file mode 100755
index 9e9d6a1..0000000
--- a/finalize_branch_for_release.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-
-set -ex
-
-function finalize_main() {
-    local top="$(dirname "$0")"/../..
-
-    # default target to modify tree and build SDK
-    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
-
-    # Build finalization artifacts.
-    source $top/build/make/finalize-aidl-vndk-sdk-resources.sh
-
-    # This command tests:
-    #   The release state for AIDL.
-    #   ABI difference between user and userdebug builds.
-    #   Resource/SDK finalization.
-    # In the future, we would want to actually turn the branch into the REL
-    # state and test with that.
-    AIDL_FROZEN_REL=true $m
-
-    # Build SDK (TODO)
-    # lunch sdk...
-    # m ...
-}
-
-finalize_main
-
diff --git a/target/OWNERS b/target/OWNERS
deleted file mode 100644
index feb2742..0000000
--- a/target/OWNERS
+++ /dev/null
@@ -1 +0,0 @@
-hansson@google.com
diff --git a/target/product/aosp_riscv64.mk b/target/product/aosp_riscv64.mk
index 436ff97..a6784cb 100644
--- a/target/product/aosp_riscv64.mk
+++ b/target/product/aosp_riscv64.mk
@@ -66,6 +66,12 @@
   init_system \
   linker \
   shell_and_utilities \
+  com.android.art \
+  com.android.conscrypt \
+  com.android.i18n \
+  com.android.runtime \
+  com.android.tzdata \
+  com.android.os.statsd \
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/default_art_config.mk)
 PRODUCT_USES_DEFAULT_ART_CONFIG := false
@@ -73,6 +79,12 @@
 PRODUCT_ARTIFACT_PATH_REQUIREMENT_ALLOWED_LIST += \
     root/init.zygote64.rc
 
+# TODO(b/206676167): This property can be removed when renderscript is removed.
+# Prevents framework from attempting to load renderscript libraries, which are
+# not supported on this architecture.
+PRODUCT_SYSTEM_PROPERTIES += \
+    config.disable_renderscript=1 \
+
 # This build configuration supports 64-bit apps only
 PRODUCT_NAME := aosp_riscv64
 PRODUCT_DEVICE := generic_riscv64
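Once a build with the config.disable_renderscript property above is flashed, its effect can be spot-checked from a host shell; this is a verification sketch, not part of the build itself:

    # Expect "1" on a device built from this product configuration.
    adb shell getprop config.disable_renderscript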
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 35fe937..94b5c16 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -383,11 +383,13 @@
     iotop \
     iperf3 \
     iw \
+    libclang_rt.ubsan_standalone \
     logpersist.start \
     logtagd.rc \
     procrank \
     profcollectd \
     profcollectctl \
+    record_binder \
     servicedispatcher \
     showmap \
     sqlite3 \
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index bd4fd1c..39666ea 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -122,6 +122,7 @@
 # without exceptions).
 PRODUCT_SYSTEM_PROPERTIES += \
     pm.dexopt.post-boot?=extract \
+    pm.dexopt.boot-after-mainline-update?=verify \
     pm.dexopt.install?=speed-profile \
     pm.dexopt.install-fast?=skip \
     pm.dexopt.install-bulk?=speed-profile \
diff --git a/target/product/window_extensions.mk b/target/product/window_extensions.mk
new file mode 100644
index 0000000..5f5431f
--- /dev/null
+++ b/target/product/window_extensions.mk
@@ -0,0 +1,24 @@
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# /system_ext packages
+PRODUCT_PACKAGES += \
+    androidx.window.extensions \
+    androidx.window.sidecar
+
+# properties
+PRODUCT_PRODUCT_PROPERTIES += \
+    persist.wm.extensions.enabled=true
diff --git a/tests/run.rbc b/tests/run.rbc
index 1b51719..33583eb 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -46,6 +46,11 @@
 assert_eq("a b c", rblf.mkstrip("  a b   \n  c \t"))
 assert_eq("1", rblf.mkstrip("1 "))
 
+assert_eq(["a", "b"], rblf.words("a b"))
+assert_eq(["a", "b", "c"], rblf.words(["a b", "c"]))
+# 1-tuple like we use in product variables
+assert_eq(["a b", ("c",)], rblf.words(["a b", ("c",)]))
+
 assert_eq("b1 b2", rblf.mksubst("a", "b", "a1 a2"))
 assert_eq(["b1", "x2"], rblf.mksubst("a", "b", ["a1", "x2"]))
 
diff --git a/tools/Android.bp b/tools/Android.bp
index 1f0d406..c5c02c6 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -64,3 +64,24 @@
   name: "check_elf_file",
   srcs: ["check_elf_file.py"],
 }
+
+python_binary_host {
+  name: "generate_gts_shared_report",
+  srcs: ["generate_gts_shared_report.py"],
+}
+
+python_binary_host {
+    name: "generate-sbom",
+    srcs: [
+        "generate-sbom.py",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    libs: [
+        "metadata_file_proto_py",
+        "libprotobuf-python",
+    ],
+}
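To try the new SBOM generator as a standalone host tool, the usual per-module goal should suffice from a lunched shell (a sketch; the tool's own command-line flags are defined in generate-sbom.py and are not assumed here):

    # Build only the host tool and confirm it was installed to the host bin dir.
    m generate-sbom
    ls out/host/linux-x86/bin/generate-sbom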
diff --git a/tools/auto_gen_test_config.py b/tools/auto_gen_test_config.py
index 943f238..ce64160 100755
--- a/tools/auto_gen_test_config.py
+++ b/tools/auto_gen_test_config.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 #
 # Copyright (C) 2017 The Android Open Source Project
 #
@@ -69,7 +69,7 @@
   module = os.path.splitext(os.path.basename(target_config))[0]
   instrumentation = instrumentation_elements[0]
   manifest = manifest_elements[0]
-  if instrumentation.attributes.has_key(ATTRIBUTE_LABEL):
+  if ATTRIBUTE_LABEL in instrumentation.attributes:
     label = instrumentation.attributes[ATTRIBUTE_LABEL].value
   else:
     label = module
diff --git a/tools/finalization/OWNERS b/tools/finalization/OWNERS
new file mode 100644
index 0000000..518b60d
--- /dev/null
+++ b/tools/finalization/OWNERS
@@ -0,0 +1,5 @@
+include platform/build/soong:/OWNERS
+smoreland@google.com
+alexbuy@google.com
+patb@google.com
+zyy@google.com
diff --git a/tools/finalization/build-step-1-and-2.sh b/tools/finalization/build-step-1-and-2.sh
new file mode 100755
index 0000000..1b749b1
--- /dev/null
+++ b/tools/finalization/build-step-1-and-2.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_main_step12() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    # SDK codename -> int
+    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+
+    # Platform/Mainline SDKs build and move to prebuilts
+    source $top/build/make/tools/finalization/localonly-finalize-mainline-sdk.sh
+
+    # REL
+    source $top/build/make/tools/finalization/finalize-sdk-rel.sh
+}
+
+finalize_main_step12
+
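A likely way to drive this wrapper from a repo checkout (a sketch, assuming the scripts stay under build/make/tools/finalization and are run from the top of the tree):

    # Step 1 finalizes AIDL/VNDK/SDK/resources; step 2 then builds the Mainline
    # SDKs, moves them to prebuilts, and switches the branch to REL. Because the
    # per-step scripts are sourced, they all share one shell environment.
    ./build/make/tools/finalization/build-step-1-and-2.sh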
diff --git a/tools/finalization/build-step-1-and-m.sh b/tools/finalization/build-step-1-and-m.sh
new file mode 100755
index 0000000..0e7129f
--- /dev/null
+++ b/tools/finalization/build-step-1-and-m.sh
@@ -0,0 +1,19 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_main_step1_and_m() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/build-step-1.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # This command tests:
+    #   The release state for AIDL.
+    #   ABI difference between user and userdebug builds.
+    #   Resource/SDK finalization.
+    AIDL_FROZEN_REL=true $m
+}
+
+finalize_main_step1_and_m
+
diff --git a/tools/finalization/build-step-1.sh b/tools/finalization/build-step-1.sh
new file mode 100755
index 0000000..a8d590f
--- /dev/null
+++ b/tools/finalization/build-step-1.sh
@@ -0,0 +1,14 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_main_step1() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    # Build finalization artifacts.
+    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+}
+
+finalize_main_step1
+
diff --git a/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff b/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
new file mode 100644
index 0000000..9ced2a9
--- /dev/null
+++ b/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
@@ -0,0 +1,30 @@
+From 12eea1512f2612f41b5cf7004ee2e6a189d548d7 Mon Sep 17 00:00:00 2001
+From: Alex Buynytskyy <alexbuy@google.com>
+Date: Thu, 01 Sep 2022 10:44:21 -0700
+Subject: [PATCH] Hacky workaround for half-finalized builds.
+
+Metalava increments the SDK level by one when it's not "REL", so we
+temporarily force the build to be "REL" while we're still in the
+process of finalizing it.
+
+This CL must be reverted as part of actually declaring "REL".
+
+Bug: none
+Test: Build
+Change-Id: I8c24c6dabec0270bc384d8465c582a4ddbe8bd6c
+---
+
+diff --git a/java/droidstubs.go b/java/droidstubs.go
+index 5777b18..ec4a0f4 100644
+--- a/java/droidstubs.go
++++ b/java/droidstubs.go
+@@ -386,7 +386,8 @@
+ 	}
+ 	if apiVersions != nil {
+ 		cmd.FlagWithArg("--current-version ", ctx.Config().PlatformSdkVersion().String())
+-		cmd.FlagWithArg("--current-codename ", ctx.Config().PlatformSdkCodename())
++		// STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD
++		// cmd.FlagWithArg("--current-codename ", ctx.Config().PlatformSdkCodename())
+ 		cmd.FlagWithInput("--apply-api-levels ", apiVersions)
+ 	}
+ }
diff --git a/tools/finalization/build_soong_java_droidstubs.go.revert_hack.diff b/tools/finalization/build_soong_java_droidstubs.go.revert_hack.diff
new file mode 100644
index 0000000..7dec97c
--- /dev/null
+++ b/tools/finalization/build_soong_java_droidstubs.go.revert_hack.diff
@@ -0,0 +1,26 @@
+From c0f6e8fe4c3b6803be97aeea6683631d616412f4 Mon Sep 17 00:00:00 2001
+From: Alex Buynytskyy <alexbuy@google.com>
+Date: Thu, 08 Dec 2022 17:52:52 +0000
+Subject: [PATCH] Revert "Hacky workaround for half-finalized builds."
+
+This reverts commit 12eea1512f2612f41b5cf7004ee2e6a189d548d7.
+
+Reason for revert: finalization-2
+
+Change-Id: Ifc801271628808693b1cb20206f8f81c9a6c694d
+---
+
+diff --git a/java/droidstubs.go b/java/droidstubs.go
+index ec4a0f4..5777b18 100644
+--- a/java/droidstubs.go
++++ b/java/droidstubs.go
+@@ -386,8 +386,7 @@
+ 	}
+ 	if apiVersions != nil {
+ 		cmd.FlagWithArg("--current-version ", ctx.Config().PlatformSdkVersion().String())
+-		// STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD
+-		// cmd.FlagWithArg("--current-codename ", ctx.Config().PlatformSdkCodename())
++		cmd.FlagWithArg("--current-codename ", ctx.Config().PlatformSdkCodename())
+ 		cmd.FlagWithInput("--apply-api-levels ", apiVersions)
+ 	}
+ }
diff --git a/finalize-cleanup.sh b/tools/finalization/cleanup.sh
similarity index 66%
rename from finalize-cleanup.sh
rename to tools/finalization/cleanup.sh
index c62a97c..cd87b1d 100755
--- a/finalize-cleanup.sh
+++ b/tools/finalization/cleanup.sh
@@ -4,7 +4,11 @@
 # set -ex
 
 function finalize_revert_local_changes_main() {
-    local top="$(dirname "$0")"/../..
+    local top="$(dirname "$0")"/../../../..
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # remove the out folder
+    $m clobber
 
     repo selfupdate
 
diff --git a/tools/finalization/environment.sh b/tools/finalization/environment.sh
new file mode 100755
index 0000000..14951b8
--- /dev/null
+++ b/tools/finalization/environment.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+
+set -ex
+
+export FINAL_BUG_ID='0'
+
+export FINAL_PLATFORM_CODENAME='UpsideDownCake'
+export CURRENT_PLATFORM_CODENAME='UpsideDownCake'
+export FINAL_PLATFORM_CODENAME_JAVA='UPSIDE_DOWN_CAKE'
+export FINAL_PLATFORM_SDK_VERSION='34'
+export FINAL_PLATFORM_VERSION='14'
+
+export FINAL_BUILD_PREFIX='UP1A'
+
+export FINAL_MAINLINE_EXTENSION='7'
\ No newline at end of file
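Every step script begins by sourcing environment.sh, so its values can be sanity-checked from any shell before running a step (sketch; the path assumes ANDROID_BUILD_TOP points at the checkout root):

    # Print the finalization constants the other scripts consume.
    # Note: environment.sh runs set -ex, so the sourcing shell inherits those options.
    source "$ANDROID_BUILD_TOP/build/make/tools/finalization/environment.sh"
    echo "$FINAL_PLATFORM_CODENAME ($FINAL_PLATFORM_CODENAME_JAVA) -> SDK $FINAL_PLATFORM_SDK_VERSION, extension $FINAL_MAINLINE_EXTENSION"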
diff --git a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
new file mode 100755
index 0000000..cdc2e3a
--- /dev/null
+++ b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
@@ -0,0 +1,148 @@
+#!/bin/bash
+
+set -ex
+
+function apply_droidstubs_hack() {
+    if ! grep -q 'STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD' "$top/build/soong/java/droidstubs.go" ; then
+        git -C "$top/build/soong" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
+    fi
+}
+
+function finalize_bionic_ndk() {
+    # Adding __ANDROID_API_<>__.
+    # If this hasn't been done, then it's not used and not really needed. Still, let's check and add it.
+    local api_level="$top/bionic/libc/include/android/api-level.h"
+    if ! grep -q "\__.*$((${FINAL_PLATFORM_SDK_VERSION}))" $api_level ; then
+        local tmpfile=$(mktemp /tmp/finalization.XXXXXX)
+        echo "
+/** Names the \"${FINAL_PLATFORM_CODENAME:0:1}\" API level ($FINAL_PLATFORM_SDK_VERSION), for comparison against \`__ANDROID_API__\`. */
+#define __ANDROID_API_${FINAL_PLATFORM_CODENAME:0:1}__ $FINAL_PLATFORM_SDK_VERSION" > "$tmpfile"
+
+        local api_level="$top/bionic/libc/include/android/api-level.h"
+        sed -i -e "/__.*$((${FINAL_PLATFORM_SDK_VERSION}-1))/r""$tmpfile" $api_level
+
+        rm "$tmpfile"
+    fi
+}
+
+function finalize_modules_utils() {
+    local shortCodename="${FINAL_PLATFORM_CODENAME:0:1}"
+    local methodPlaceholder="INSERT_NEW_AT_LEAST_${shortCodename}_METHOD_HERE"
+
+    local tmpfile=$(mktemp /tmp/finalization.XXXXXX)
+    echo "    /** Checks if the device is running on a release version of Android $FINAL_PLATFORM_CODENAME or newer */
+    @ChecksSdkIntAtLeast(api = $FINAL_PLATFORM_SDK_VERSION /* BUILD_VERSION_CODES.$FINAL_PLATFORM_CODENAME */)
+    public static boolean isAtLeast${FINAL_PLATFORM_CODENAME:0:1}() {
+        return SDK_INT >= $FINAL_PLATFORM_SDK_VERSION;
+    }" > "$tmpfile"
+
+    local javaFuncRegex='\/\*\*[^{]*isAtLeast'"${shortCodename}"'() {[^{}]*}'
+    local javaFuncReplace="N;N;N;N;N;N;N;N; s/$javaFuncRegex/$methodPlaceholder/; /$javaFuncRegex/!{P;D};"
+
+    local javaSdkLevel="$top/frameworks/libs/modules-utils/java/com/android/modules/utils/build/SdkLevel.java"
+    sed -i "$javaFuncReplace" $javaSdkLevel
+
+    sed -i "/${methodPlaceholder}"'/{
+           r '"$tmpfile"'
+           d}' $javaSdkLevel
+
+    echo "// Checks if the device is running on a release version of Android ${FINAL_PLATFORM_CODENAME:0:1} or newer.
+inline bool IsAtLeast${FINAL_PLATFORM_CODENAME:0:1}() { return android_get_device_api_level() >= $FINAL_PLATFORM_SDK_VERSION; }" > "$tmpfile"
+
+    local cppFuncRegex='\/\/[^{]*IsAtLeast'"${shortCodename}"'() {[^{}]*}'
+    local cppFuncReplace="N;N;N;N;N;N; s/$cppFuncRegex/$methodPlaceholder/; /$cppFuncRegex/!{P;D};"
+
+    local cppSdkLevel="$top/frameworks/libs/modules-utils/build/include/android-modules-utils/sdk_level.h"
+    sed -i "$cppFuncReplace" $cppSdkLevel
+    sed -i "/${methodPlaceholder}"'/{
+           r '"$tmpfile"'
+           d}' $cppSdkLevel
+
+    rm "$tmpfile"
+}
+
+function finalize_aidl_vndk_sdk_resources() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local SDK_CODENAME="public static final int $FINAL_PLATFORM_CODENAME_JAVA = CUR_DEVELOPMENT;"
+    local SDK_VERSION="public static final int $FINAL_PLATFORM_CODENAME_JAVA = $FINAL_PLATFORM_SDK_VERSION;"
+
+    # default target to modify tree and build SDK
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug DIST_DIR=out/dist"
+
+    # The full process can be found at (INTERNAL) go/android-sdk-finalization.
+
+    # apply droidstubs hack to prevent tools from incrementing an API version
+    apply_droidstubs_hack
+
+    # bionic/NDK
+    finalize_bionic_ndk
+
+    # VNDK definitions for new SDK version
+    cp "$top/development/vndk/tools/definition-tool/datasets/vndk-lib-extra-list-current.txt" \
+       "$top/development/vndk/tools/definition-tool/datasets/vndk-lib-extra-list-$FINAL_PLATFORM_SDK_VERSION.txt"
+
+    AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api create_reference_dumps
+
+    # Generate ABI dumps
+    ANDROID_BUILD_TOP="$top" \
+        out/host/linux-x86/bin/create_reference_dumps \
+        -p aosp_arm64 --build-variant user
+
+    echo "NOTE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF (until 'DONE')"
+    # Update new versions of files. See update-vndk-list.sh (which requires envsetup.sh)
+    $m check-vndk-list || \
+        { cp $top/out/soong/vndk/vndk.libraries.txt $top/build/make/target/product/gsi/current.txt; }
+    echo "DONE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF"
+
+    # Finalize SDK
+
+    # frameworks/libs/modules-utils
+    finalize_modules_utils
+
+    # build/make
+    local version_defaults="$top/build/make/core/version_defaults.mk"
+    sed -i -e "s/PLATFORM_SDK_VERSION := .*/PLATFORM_SDK_VERSION := ${FINAL_PLATFORM_SDK_VERSION}/g" $version_defaults
+    sed -i -e "s/PLATFORM_VERSION_LAST_STABLE := .*/PLATFORM_VERSION_LAST_STABLE := ${FINAL_PLATFORM_VERSION}/g" $version_defaults
+    sed -i -e "s/sepolicy_major_vers := .*/sepolicy_major_vers := ${FINAL_PLATFORM_SDK_VERSION}/g" "$top/build/make/core/config.mk"
+    cp "$top/build/make/target/product/gsi/current.txt" "$top/build/make/target/product/gsi/$FINAL_PLATFORM_SDK_VERSION.txt"
+
+    # build/soong
+    sed -i -e "/:.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\\t\t\t\"${FINAL_PLATFORM_CODENAME}\":     ${FINAL_PLATFORM_SDK_VERSION}," "$top/build/soong/android/api_levels.go"
+
+    # cts
+    echo ${FINAL_PLATFORM_VERSION} > "$top/cts/tests/tests/os/assets/platform_releases.txt"
+    sed -i -e "s/EXPECTED_SDK = $((${FINAL_PLATFORM_SDK_VERSION}-1))/EXPECTED_SDK = ${FINAL_PLATFORM_SDK_VERSION}/g" "$top/cts/tests/tests/os/src/android/os/cts/BuildVersionTest.java"
+
+    # libcore
+    sed -i "s%$SDK_CODENAME%$SDK_VERSION%g" "$top/libcore/dalvik/src/main/java/dalvik/annotation/compat/VersionCodes.java"
+
+    # platform_testing
+    local version_codes="$top/platform_testing/libraries/compatibility-common-util/src/com/android/compatibility/common/util/VersionCodes.java"
+    sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1));/a \\    ${SDK_VERSION}" $version_codes
+
+    # Finalize resources
+    "$top/frameworks/base/tools/aapt2/tools/finalize_res.py" \
+           "$top/frameworks/base/core/res/res/values/public-staging.xml" \
+           "$top/frameworks/base/core/res/res/values/public-final.xml"
+
+    # frameworks/base
+    sed -i "s%$SDK_CODENAME%$SDK_VERSION%g" "$top/frameworks/base/core/java/android/os/Build.java"
+    sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\    SDK_${FINAL_PLATFORM_CODENAME_JAVA} = ${FINAL_PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt/SdkConstants.h"
+    sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\  SDK_${FINAL_PLATFORM_CODENAME_JAVA} = ${FINAL_PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt2/SdkConstants.h"
+
+    # Bump Mainline SDK extension version.
+    set +e
+    "$top/packages/modules/SdkExtensions/gen_sdk/bump_sdk.sh" ${FINAL_MAINLINE_EXTENSION}
+    set -e
+    local version_defaults="$top/build/make/core/version_defaults.mk"
+    sed -i -e "s/PLATFORM_SDK_EXTENSION_VERSION := .*/PLATFORM_SDK_EXTENSION_VERSION := ${FINAL_MAINLINE_EXTENSION}/g" $version_defaults
+
+    # Force update current.txt
+    $m clobber
+    $m update-api
+}
+
+finalize_aidl_vndk_sdk_resources
+
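Several of the edits above rely on GNU sed's append-after-match (a) and read-file (r) commands to slot a new SDK entry in directly below the previous one. The same idiom in isolation, on a throwaway file with made-up contents, looks like this:

    # Sketch of the "/match/a new-text" pattern used throughout the script.
    tmp=$(mktemp /tmp/finalization.XXXXXX)
    printf '%s\n' '"Tiramisu":       33,' > "$tmp"
    # Append the new codename right after the line for the previous SDK level.
    sed -i -e '/: *33,/a \    "UpsideDownCake": 34,' "$tmp"
    cat "$tmp"   # shows the Tiramisu line followed by the new UpsideDownCake line
    rm "$tmp"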
diff --git a/tools/finalization/finalize-sdk-rel.sh b/tools/finalization/finalize-sdk-rel.sh
new file mode 100755
index 0000000..5cc90cf
--- /dev/null
+++ b/tools/finalization/finalize-sdk-rel.sh
@@ -0,0 +1,60 @@
+#!/bin/bash
+
+set -ex
+
+function revert_droidstubs_hack() {
+    if grep -q 'STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD' "$top/build/soong/java/droidstubs.go" ; then
+        git -C "$top/build/soong" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.revert_hack.diff
+    fi
+}
+
+function apply_prerelease_sdk_hack() {
+    if ! grep -q 'STOPSHIP: hack for the pre-release SDK' "$top/frameworks/base/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java" ; then
+        git -C "$top/frameworks/base" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.apply_hack.diff
+    fi
+}
+
+function finalize_sdk_rel() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    # default target to modify tree and build SDK
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug DIST_DIR=out/dist"
+
+    # revert droidstubs hack now that we are switching to REL
+    revert_droidstubs_hack
+
+    # let apps built with the pre-release SDK still parse
+    apply_prerelease_sdk_hack
+
+    # adb keys
+    $m adb
+    LOGNAME=android-eng HOSTNAME=google.com "$top/out/host/linux-x86/bin/adb" keygen "$top/vendor/google/security/adb/${FINAL_PLATFORM_VERSION}.adb_key"
+
+    # build/make/core/version_defaults.mk
+    sed -i -e "s/PLATFORM_VERSION_CODENAME.${FINAL_BUILD_PREFIX} := .*/PLATFORM_VERSION_CODENAME.${FINAL_BUILD_PREFIX} := REL/g" "$top/build/make/core/version_defaults.mk"
+
+    # cts
+    echo "$FINAL_PLATFORM_VERSION" > "$top/cts/tests/tests/os/assets/platform_versions.txt"
+    if [ "$FINAL_PLATFORM_CODENAME" != "$CURRENT_PLATFORM_CODENAME" ]; then
+        echo "$CURRENT_PLATFORM_CODENAME" >> "$top/cts/tests/tests/os/assets/platform_versions.txt"
+    fi
+    git -C "$top/cts" mv hostsidetests/theme/assets/${FINAL_PLATFORM_CODENAME} hostsidetests/theme/assets/${FINAL_PLATFORM_SDK_VERSION}
+
+    # system/sepolicy
+    mkdir -p "$top/system/sepolicy/prebuilts/api/${FINAL_PLATFORM_SDK_VERSION}.0/"
+    cp -r "$top/system/sepolicy/public/" "$top/system/sepolicy/prebuilts/api/${FINAL_PLATFORM_SDK_VERSION}.0/"
+    cp -r "$top/system/sepolicy/private/" "$top/system/sepolicy/prebuilts/api/${FINAL_PLATFORM_SDK_VERSION}.0/"
+
+    # prebuilts/abi-dumps/ndk
+    mv "$top/prebuilts/abi-dumps/ndk/current" "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION"
+
+    # prebuilts/abi-dumps/vndk
+    mv "$top/prebuilts/abi-dumps/vndk/$CURRENT_PLATFORM_CODENAME" "$top/prebuilts/abi-dumps/vndk/$FINAL_PLATFORM_SDK_VERSION"
+
+    # prebuilts/abi-dumps/platform
+    mv "$top/prebuilts/abi-dumps/platform/current" "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION"
+}
+
+finalize_sdk_rel
+
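One quick check that the REL switch took effect, assuming environment.sh has been sourced so FINAL_BUILD_PREFIX and top are set as above (a verification sketch, not part of the script):

    # After finalize-sdk-rel.sh the branch's codename entry should read REL.
    grep "PLATFORM_VERSION_CODENAME.${FINAL_BUILD_PREFIX}" "$top/build/make/core/version_defaults.mk"
    # expected output: PLATFORM_VERSION_CODENAME.UP1A := REL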
diff --git a/tools/finalization/frameworks_base.apply_hack.diff b/tools/finalization/frameworks_base.apply_hack.diff
new file mode 100644
index 0000000..545c230
--- /dev/null
+++ b/tools/finalization/frameworks_base.apply_hack.diff
@@ -0,0 +1,129 @@
+From 3c9a5321dc94124367f2f4363d85a8f488f5d4d1 Mon Sep 17 00:00:00 2001
+From: Yurii Zubrytskyi <zyy@google.com>
+Date: Wed, 04 May 2022 01:05:24 -0700
+Subject: [PATCH] HACK: allow apps with pre-release SDK RESTRICT AUTOMERGE
+
+Revert before releasing
+Let the apps built with pre-release Tiramisu SDK parse
++ fix a test that didn't expect REL builds to throw
+  when checking for lettered versions
+
+Test: build
+Bug: 225745567
+Bug: 231407096
+Change-Id: Ia0de2ab1a99e5f186f0d871e6225d88bf3308df6
+---
+
+diff --git a/core/java/android/content/pm/PackageParser.java b/core/java/android/content/pm/PackageParser.java
+index c15b3e0..3f4df4d 100644
+--- a/core/java/android/content/pm/PackageParser.java
++++ b/core/java/android/content/pm/PackageParser.java
+@@ -2628,6 +2628,15 @@
+             return Build.VERSION_CODES.CUR_DEVELOPMENT;
+         }
+ 
++        // STOPSHIP: hack for the pre-release SDK
++        if (platformSdkCodenames.length == 0
++                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
++                targetCode)) {
++            Slog.w(TAG, "Package requires development platform " + targetCode
++                    + ", returning current version " + Build.VERSION.SDK_INT);
++            return Build.VERSION.SDK_INT;
++        }
++
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             outError[0] = "Requires development platform " + targetCode
+@@ -2699,6 +2708,15 @@
+             return Build.VERSION_CODES.CUR_DEVELOPMENT;
+         }
+ 
++        // STOPSHIP: hack for the pre-release SDK
++        if (platformSdkCodenames.length == 0
++                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
++                minCode)) {
++            Slog.w(TAG, "Package requires min development platform " + minCode
++                    + ", returning current version " + Build.VERSION.SDK_INT);
++            return Build.VERSION.SDK_INT;
++        }
++
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             outError[0] = "Requires development platform " + minCode
+diff --git a/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java b/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
+index 3e1c5bb..8cc4cdb 100644
+--- a/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
++++ b/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
+@@ -316,6 +316,15 @@
+             return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+         }
+ 
++        // STOPSHIP: hack for the pre-release SDK
++        if (platformSdkCodenames.length == 0
++                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
++                        minCode)) {
++            Slog.w(TAG, "Parsed package requires min development platform " + minCode
++                    + ", returning current version " + Build.VERSION.SDK_INT);
++            return input.success(Build.VERSION.SDK_INT);
++        }
++
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK,
+@@ -368,19 +377,27 @@
+             return input.success(targetVers);
+         }
+ 
++        // If it's a pre-release SDK and the codename matches this platform, it
++        // definitely targets this SDK.
++        if (matchTargetCode(platformSdkCodenames, targetCode)) {
++            return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
++        }
++
++        // STOPSHIP: hack for the pre-release SDK
++        if (platformSdkCodenames.length == 0
++                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
++                        targetCode)) {
++            Slog.w(TAG, "Parsed package requires development platform " + targetCode
++                    + ", returning current version " + Build.VERSION.SDK_INT);
++            return input.success(Build.VERSION.SDK_INT);
++        }
++
+         try {
+             if (allowUnknownCodenames && UnboundedSdkLevel.isAtMost(targetCode)) {
+                 return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+             }
+         } catch (IllegalArgumentException e) {
+-            // isAtMost() throws it when encountering an older SDK codename
+-            return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK, e.getMessage());
+-        }
+-
+-        // If it's a pre-release SDK and the codename matches this platform, it
+-        // definitely targets this SDK.
+-        if (matchTargetCode(platformSdkCodenames, targetCode)) {
+-            return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
++            return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK, "Bad package SDK");
+         }
+ 
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+diff --git a/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java b/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
+index 92c7871..687e8f7 100644
+--- a/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
++++ b/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
+@@ -446,14 +446,14 @@
+                         + "    <library \n"
+                         + "        name=\"foo\"\n"
+                         + "        file=\"" + mFooJar + "\"\n"
+-                        + "        on-bootclasspath-before=\"Q\"\n"
++                        + "        on-bootclasspath-before=\"A\"\n"
+                         + "        on-bootclasspath-since=\"W\"\n"
+                         + "     />\n\n"
+                         + " </permissions>";
+         parseSharedLibraries(contents);
+         assertFooIsOnlySharedLibrary();
+         SystemConfig.SharedLibraryEntry entry = mSysConfig.getSharedLibraries().get("foo");
+-        assertThat(entry.onBootclasspathBefore).isEqualTo("Q");
++        assertThat(entry.onBootclasspathBefore).isEqualTo("A");
+         assertThat(entry.onBootclasspathSince).isEqualTo("W");
+     }
+ 
diff --git a/tools/finalization/frameworks_base.revert_hack.diff b/tools/finalization/frameworks_base.revert_hack.diff
new file mode 100644
index 0000000..1d147b1
--- /dev/null
+++ b/tools/finalization/frameworks_base.revert_hack.diff
@@ -0,0 +1,125 @@
+From b4ae5c71f327d00081bbb0b7b26d48eb88761fbc Mon Sep 17 00:00:00 2001
+From: Alex Buynytskyy <alexbuy@google.com>
+Date: Tue, 21 Feb 2023 01:43:14 +0000
+Subject: [PATCH] Revert "HACK: allow apps with pre-release SDK RESTRICT AUTOMERGE"
+
+This reverts commit 3c9a5321dc94124367f2f4363d85a8f488f5d4d1.
+
+Reason for revert: not needed anymore
+
+Change-Id: I5c5e3af78a41e7bd8cbc99464dccc57c345105f3
+---
+
+diff --git a/core/java/android/content/pm/PackageParser.java b/core/java/android/content/pm/PackageParser.java
+index 3f4df4d..c15b3e0 100644
+--- a/core/java/android/content/pm/PackageParser.java
++++ b/core/java/android/content/pm/PackageParser.java
+@@ -2628,15 +2628,6 @@
+             return Build.VERSION_CODES.CUR_DEVELOPMENT;
+         }
+ 
+-        // STOPSHIP: hack for the pre-release SDK
+-        if (platformSdkCodenames.length == 0
+-                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
+-                targetCode)) {
+-            Slog.w(TAG, "Package requires development platform " + targetCode
+-                    + ", returning current version " + Build.VERSION.SDK_INT);
+-            return Build.VERSION.SDK_INT;
+-        }
+-
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             outError[0] = "Requires development platform " + targetCode
+@@ -2708,15 +2699,6 @@
+             return Build.VERSION_CODES.CUR_DEVELOPMENT;
+         }
+ 
+-        // STOPSHIP: hack for the pre-release SDK
+-        if (platformSdkCodenames.length == 0
+-                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
+-                minCode)) {
+-            Slog.w(TAG, "Package requires min development platform " + minCode
+-                    + ", returning current version " + Build.VERSION.SDK_INT);
+-            return Build.VERSION.SDK_INT;
+-        }
+-
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             outError[0] = "Requires development platform " + minCode
+diff --git a/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java b/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
+index 8cc4cdb..3e1c5bb 100644
+--- a/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
++++ b/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java
+@@ -316,15 +316,6 @@
+             return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+         }
+ 
+-        // STOPSHIP: hack for the pre-release SDK
+-        if (platformSdkCodenames.length == 0
+-                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
+-                        minCode)) {
+-            Slog.w(TAG, "Parsed package requires min development platform " + minCode
+-                    + ", returning current version " + Build.VERSION.SDK_INT);
+-            return input.success(Build.VERSION.SDK_INT);
+-        }
+-
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+         if (platformSdkCodenames.length > 0) {
+             return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK,
+@@ -377,27 +368,19 @@
+             return input.success(targetVers);
+         }
+ 
+-        // If it's a pre-release SDK and the codename matches this platform, it
+-        // definitely targets this SDK.
+-        if (matchTargetCode(platformSdkCodenames, targetCode)) {
+-            return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+-        }
+-
+-        // STOPSHIP: hack for the pre-release SDK
+-        if (platformSdkCodenames.length == 0
+-                && Build.VERSION.KNOWN_CODENAMES.stream().max(String::compareTo).orElse("").equals(
+-                        targetCode)) {
+-            Slog.w(TAG, "Parsed package requires development platform " + targetCode
+-                    + ", returning current version " + Build.VERSION.SDK_INT);
+-            return input.success(Build.VERSION.SDK_INT);
+-        }
+-
+         try {
+             if (allowUnknownCodenames && UnboundedSdkLevel.isAtMost(targetCode)) {
+                 return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+             }
+         } catch (IllegalArgumentException e) {
+-            return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK, "Bad package SDK");
++            // isAtMost() throws it when encountering an older SDK codename
++            return input.error(PackageManager.INSTALL_FAILED_OLDER_SDK, e.getMessage());
++        }
++
++        // If it's a pre-release SDK and the codename matches this platform, it
++        // definitely targets this SDK.
++        if (matchTargetCode(platformSdkCodenames, targetCode)) {
++            return input.success(Build.VERSION_CODES.CUR_DEVELOPMENT);
+         }
+ 
+         // Otherwise, we're looking at an incompatible pre-release SDK.
+diff --git a/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java b/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
+index 687e8f7..92c7871 100644
+--- a/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
++++ b/services/tests/servicestests/src/com/android/server/systemconfig/SystemConfigTest.java
+@@ -446,14 +446,14 @@
+                         + "    <library \n"
+                         + "        name=\"foo\"\n"
+                         + "        file=\"" + mFooJar + "\"\n"
+-                        + "        on-bootclasspath-before=\"A\"\n"
++                        + "        on-bootclasspath-before=\"Q\"\n"
+                         + "        on-bootclasspath-since=\"W\"\n"
+                         + "     />\n\n"
+                         + " </permissions>";
+         parseSharedLibraries(contents);
+         assertFooIsOnlySharedLibrary();
+         SystemConfig.SharedLibraryEntry entry = mSysConfig.getSharedLibraries().get("foo");
+-        assertThat(entry.onBootclasspathBefore).isEqualTo("A");
++        assertThat(entry.onBootclasspathBefore).isEqualTo("Q");
+         assertThat(entry.onBootclasspathSince).isEqualTo("W");
+     }
+ 
diff --git a/tools/finalization/localonly-finalize-mainline-sdk.sh b/tools/finalization/localonly-finalize-mainline-sdk.sh
new file mode 100755
index 0000000..f614fc1
--- /dev/null
+++ b/tools/finalization/localonly-finalize-mainline-sdk.sh
@@ -0,0 +1,20 @@
+#!/bin/bash
+
+set -ex
+
+function finalize_locally_mainline_sdk() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    # Build Platform SDKs.
+    $top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=sdk TARGET_BUILD_VARIANT=userdebug sdk dist sdk_repo DIST_DIR=out/dist
+
+    # Build Modules SDKs.
+    TARGET_BUILD_VARIANT=userdebug UNBUNDLED_BUILD_SDKS_FROM_SOURCE=true DIST_DIR=out/dist "$top/vendor/google/build/mainline_modules_sdks.sh"
+
+    # Update prebuilts.
+    "$top/prebuilts/build-tools/path/linux-x86/python3" -W ignore::DeprecationWarning "$top/prebuilts/sdk/update_prebuilts.py" --local_mode -f ${FINAL_PLATFORM_SDK_VERSION} -e ${FINAL_MAINLINE_EXTENSION} --bug 1 1
+}
+
+finalize_locally_mainline_sdk
+
diff --git a/tools/finalization/step-1.sh b/tools/finalization/step-1.sh
new file mode 100755
index 0000000..cf21e45
--- /dev/null
+++ b/tools/finalization/step-1.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+# Script to perform the 1st step of Android Finalization: API/SDK finalization; create CLs and upload them to Gerrit.
+
+set -ex
+
+function commit_step_1_changes() {
+    set +e
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization" ;
+            git add -A . ;
+            git commit -m "$FINAL_PLATFORM_CODENAME is now $FINAL_PLATFORM_SDK_VERSION" \
+                       -m "Ignore-AOSP-First: $FINAL_PLATFORM_CODENAME Finalization
+Bug: $FINAL_BUG_ID
+Test: build";
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function finalize_step_1_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # vndk etc finalization
+    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+
+    # move all changes to finalization branch/topic and upload to gerrit
+    commit_step_1_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+finalize_step_1_main
diff --git a/tools/finalization/step-2.sh b/tools/finalization/step-2.sh
new file mode 100755
index 0000000..d0b24ae
--- /dev/null
+++ b/tools/finalization/step-2.sh
@@ -0,0 +1,34 @@
+#!/bin/bash
+# Script to perform the 2nd step of Android Finalization: REL finalization; create CLs and upload them to Gerrit.
+
+function commit_step_2_changes() {
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization-Rel" ;
+            git add -A . ;
+            git commit -m "$FINAL_PLATFORM_CODENAME/$FINAL_PLATFORM_SDK_VERSION is now REL" \
+                       -m "Ignore-AOSP-First: $FINAL_PLATFORM_CODENAME Finalization
+Bug: $FINAL_BUG_ID
+Test: build";
+
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function finalize_step_2_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # prebuilts etc
+    source $top/build/make/tools/finalization/finalize-sdk-rel.sh
+
+    # move all changes to finalization branch/topic and upload to gerrit
+    commit_step_2_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+finalize_step_2_main
diff --git a/tools/generate-sbom.py b/tools/generate-sbom.py
new file mode 100755
index 0000000..eae7945
--- /dev/null
+++ b/tools/generate-sbom.py
@@ -0,0 +1,684 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generate the SBOM of the current target product in SPDX format.
+Usage example:
+  generate-sbom.py --output_file out/target/product/vsoc_x86_64/sbom.spdx \
+                   --metadata out/target/product/vsoc_x86_64/sbom-metadata.csv \
+                   --product_out_dir=out/target/product/vsoc_x86_64 \
+                   --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
+                   --product_mfr=Google
+"""
+
+import argparse
+import csv
+import datetime
+import google.protobuf.text_format as text_format
+import hashlib
+import json
+import os
+import metadata_file_pb2
+
+# Common
+SPDXID = 'SPDXID'
+SPDX_VERSION = 'SPDXVersion'
+DATA_LICENSE = 'DataLicense'
+DOCUMENT_NAME = 'DocumentName'
+DOCUMENT_NAMESPACE = 'DocumentNamespace'
+CREATED = 'Created'
+CREATOR = 'Creator'
+EXTERNAL_DOCUMENT_REF = 'ExternalDocumentRef'
+
+# Package
+PACKAGE_NAME = 'PackageName'
+PACKAGE_DOWNLOAD_LOCATION = 'PackageDownloadLocation'
+PACKAGE_VERSION = 'PackageVersion'
+PACKAGE_SUPPLIER = 'PackageSupplier'
+FILES_ANALYZED = 'FilesAnalyzed'
+PACKAGE_VERIFICATION_CODE = 'PackageVerificationCode'
+PACKAGE_EXTERNAL_REF = 'ExternalRef'
+# Package license
+PACKAGE_LICENSE_CONCLUDED = 'PackageLicenseConcluded'
+PACKAGE_LICENSE_INFO_FROM_FILES = 'PackageLicenseInfoFromFiles'
+PACKAGE_LICENSE_DECLARED = 'PackageLicenseDeclared'
+PACKAGE_LICENSE_COMMENTS = 'PackageLicenseComments'
+
+# File
+FILE_NAME = 'FileName'
+FILE_CHECKSUM = 'FileChecksum'
+# File license
+FILE_LICENSE_CONCLUDED = 'LicenseConcluded'
+FILE_LICENSE_INFO_IN_FILE = 'LicenseInfoInFile'
+FILE_LICENSE_COMMENTS = 'LicenseComments'
+FILE_COPYRIGHT_TEXT = 'FileCopyrightText'
+FILE_NOTICE = 'FileNotice'
+FILE_ATTRIBUTION_TEXT = 'FileAttributionText'
+
+# Relationship
+RELATIONSHIP = 'Relationship'
+REL_DESCRIBES = 'DESCRIBES'
+REL_VARIANT_OF = 'VARIANT_OF'
+REL_GENERATED_FROM = 'GENERATED_FROM'
+
+# Package type
+PKG_SOURCE = 'SOURCE'
+PKG_UPSTREAM = 'UPSTREAM'
+PKG_PREBUILT = 'PREBUILT'
+
+# Security tag
+NVD_CPE23 = 'NVD-CPE2.3:'
+
+# Report
+ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
+ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
+ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
+ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
+INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
+
+
+def get_args():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
+  parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
+  parser.add_argument('--metadata', required=True, help='The SBOM metadata file path.')
+  parser.add_argument('--product_out_dir', required=True, help='The parent directory of all the installed files.')
+  parser.add_argument('--build_version', required=True, help='The build version.')
+  parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
+  parser.add_argument('--json', action='store_true', default=False, help='Generated SBOM file in SPDX JSON format')
+  parser.add_argument('--unbundled', action='store_true', default=False, help='Generate SBOM file for unbundled module')
+
+  return parser.parse_args()
+
+
+def log(*info):
+  if args.verbose:
+    for i in info:
+      print(i)
+
+
+def new_doc_header(doc_id):
+  return {
+      SPDX_VERSION: 'SPDX-2.3',
+      DATA_LICENSE: 'CC0-1.0',
+      SPDXID: doc_id,
+      DOCUMENT_NAME: args.build_version,
+      DOCUMENT_NAMESPACE: f'https://www.google.com/sbom/spdx/android/{args.build_version}',
+      CREATOR: 'Organization: Google, LLC',
+      CREATED: '<timestamp>',
+      EXTERNAL_DOCUMENT_REF: [],
+  }
+
+
+def new_package_record(id, name, version, supplier, download_location=None, files_analyzed='false', external_refs=[]):
+  package = {
+      PACKAGE_NAME: name,
+      SPDXID: id,
+      PACKAGE_DOWNLOAD_LOCATION: download_location if download_location else 'NONE',
+      FILES_ANALYZED: files_analyzed,
+  }
+  if version:
+    package[PACKAGE_VERSION] = version
+  if supplier:
+    package[PACKAGE_SUPPLIER] = f'Organization: {supplier}'
+  if external_refs:
+    package[PACKAGE_EXTERNAL_REF] = external_refs
+
+  return package
+
+
+def new_file_record(id, name, checksum):
+  return {
+      FILE_NAME: name,
+      SPDXID: id,
+      FILE_CHECKSUM: checksum
+  }
+
+
+def encode_for_spdxid(s):
+  """Simple encode for string values used in SPDXID which uses the charset of A-Za-Z0-9.-"""
+  result = ''
+  for c in s:
+    if c.isalnum() or c in '.-':
+      result += c
+    elif c in '_@/':
+      result += '-'
+    else:
+      result += '0x' + c.encode('utf-8').hex()
+
+  return result.lstrip('-')
+
+
+def new_package_id(package_name, type):
+  return f'SPDXRef-{type}-{encode_for_spdxid(package_name)}'
+
+
+def new_external_doc_ref(package_name, sbom_url, sbom_checksum):
+  doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{encode_for_spdxid(package_name)}'
+  return f'{EXTERNAL_DOCUMENT_REF}: {doc_ref_id} {sbom_url} {sbom_checksum}', doc_ref_id
+
+
+def new_file_id(file_path):
+  return f'SPDXRef-{encode_for_spdxid(file_path)}'
+
+
+def new_relationship_record(id1, relationship, id2):
+  return f'{RELATIONSHIP}: {id1} {relationship} {id2}'
+
+
+def checksum(file_path):
+  file_path = args.product_out_dir + '/' + file_path
+  h = hashlib.sha1()
+  if os.path.islink(file_path):
+    h.update(os.readlink(file_path).encode('utf-8'))
+  else:
+    with open(file_path, 'rb') as f:
+      h.update(f.read())
+  return f'SHA1: {h.hexdigest()}'
+
+
+def is_soong_prebuilt_module(file_metadata):
+  return file_metadata['soong_module_type'] and file_metadata['soong_module_type'] in [
+      'android_app_import', 'android_library_import', 'cc_prebuilt_binary', 'cc_prebuilt_library',
+      'cc_prebuilt_library_headers', 'cc_prebuilt_library_shared', 'cc_prebuilt_library_static', 'cc_prebuilt_object',
+      'dex_import', 'java_import', 'java_sdk_library_import', 'java_system_modules_import',
+      'libclang_rt_prebuilt_library_static', 'libclang_rt_prebuilt_library_shared', 'llvm_prebuilt_library_static',
+      'ndk_prebuilt_object', 'ndk_prebuilt_shared_stl', 'ndk_prebuilt_static_stl', 'prebuilt_apex',
+      'prebuilt_bootclasspath_fragment', 'prebuilt_dsp', 'prebuilt_firmware', 'prebuilt_kernel_modules',
+      'prebuilt_rfsa', 'prebuilt_root', 'rust_prebuilt_dylib', 'rust_prebuilt_library', 'rust_prebuilt_rlib',
+      'vndk_prebuilt_shared',
+
+      # 'android_test_import',
+      # 'cc_prebuilt_test_library_shared',
+      # 'java_import_host',
+      # 'java_test_import',
+      # 'llvm_host_prebuilt_library_shared',
+      # 'prebuilt_apis',
+      # 'prebuilt_build_tool',
+      # 'prebuilt_defaults',
+      # 'prebuilt_etc',
+      # 'prebuilt_etc_host',
+      # 'prebuilt_etc_xml',
+      # 'prebuilt_font',
+      # 'prebuilt_hidl_interfaces',
+      # 'prebuilt_platform_compat_config',
+      # 'prebuilt_stubs_sources',
+      # 'prebuilt_usr_share',
+      # 'prebuilt_usr_share_host',
+      # 'soong_config_module_type_import',
+  ]
+
+
+def is_source_package(file_metadata):
+  module_path = file_metadata['module_path']
+  return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)
+
+
+def is_prebuilt_package(file_metadata):
+  module_path = file_metadata['module_path']
+  if module_path:
+    return (module_path.startswith('prebuilts/') or
+            is_soong_prebuilt_module(file_metadata) or
+            file_metadata['is_prebuilt_make_module'])
+
+  kernel_module_copy_files = file_metadata['kernel_module_copy_files']
+  if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
+    return True
+
+  return False
+
+
+def get_source_package_info(file_metadata, metadata_file_path):
+  if not metadata_file_path:
+    return file_metadata['module_path'], []
+
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  external_refs = []
+  for tag in metadata_proto.third_party.security.tag:
+    if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
+      external_refs.append(f'{PACKAGE_EXTERNAL_REF}: SECURITY cpe23Type {tag.removeprefix(NVD_CPE23)}')
+    elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
+      external_refs.append(f'{PACKAGE_EXTERNAL_REF}: SECURITY cpe22Type {tag.removeprefix(NVD_CPE23)}')
+
+  if metadata_proto.name:
+    return metadata_proto.name, external_refs
+  else:
+    return os.path.basename(metadata_file_path), external_refs  # return the directory name only as package name
+
+
+def get_prebuilt_package_name(file_metadata, metadata_file_path):
+  name = None
+  if metadata_file_path:
+    metadata_proto = metadata_file_protos[metadata_file_path]
+    if metadata_proto.name:
+      name = metadata_proto.name
+    else:
+      name = metadata_file_path
+  elif file_metadata['module_path']:
+    name = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
+    name = os.path.dirname(src_path)
+
+  return name.removeprefix('prebuilts/').replace('/', '-')
+
+
+def get_metadata_file_path(file_metadata):
+  metadata_path = ''
+  if file_metadata['module_path']:
+    metadata_path = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])
+
+  while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
+    metadata_path = os.path.dirname(metadata_path)
+
+  return metadata_path
+
+
+def get_package_version(metadata_file_path):
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  return metadata_proto.third_party.version
+
+
+def get_package_homepage(metadata_file_path):
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.homepage:
+    return metadata_proto.third_party.homepage
+  for url in metadata_proto.third_party.url:
+    if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
+      return url.value
+
+  return None
+
+
+def get_package_download_location(metadata_file_path):
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.url:
+    urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
+    if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
+      return urls[0].value
+    elif len(urls) > 1:
+      return urls[1].value
+
+  return None
+
+
+def get_sbom_fragments(installed_file_metadata, metadata_file_path):
+  external_doc_ref = None
+  packages = []
+  relationships = []
+
+  # Info from METADATA file
+  homepage = get_package_homepage(metadata_file_path)
+  version = get_package_version(metadata_file_path)
+  download_location = get_package_download_location(metadata_file_path)
+
+  if is_source_package(installed_file_metadata):
+    # Source fork packages
+    name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
+    source_package_id = new_package_id(name, PKG_SOURCE)
+    source_package = new_package_record(source_package_id, name, args.build_version, args.product_mfr,
+                                        external_refs=external_refs)
+
+    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+    upstream_package = new_package_record(upstream_package_id, name, version, homepage, download_location)
+    packages += [source_package, upstream_package]
+    relationships.append(new_relationship_record(source_package_id, REL_VARIANT_OF, upstream_package_id))
+  elif is_prebuilt_package(installed_file_metadata):
+    # Prebuilt fork packages
+    name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
+    prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
+    prebuilt_package = new_package_record(prebuilt_package_id, name, args.build_version, args.product_mfr)
+    packages.append(prebuilt_package)
+
+    if metadata_file_path:
+      metadata_proto = metadata_file_protos[metadata_file_path]
+      if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
+        sbom_url = metadata_proto.third_party.sbom_ref.url
+        sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
+        upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
+        if sbom_url and sbom_checksum and upstream_element_id:
+          external_doc_ref, doc_ref_id = new_external_doc_ref(name, sbom_url, sbom_checksum)
+          relationships.append(
+              new_relationship_record(prebuilt_package_id, REL_VARIANT_OF, doc_ref_id + ':' + upstream_element_id))
+
+  return external_doc_ref, packages, relationships
+
+
+def generate_package_verification_code(files):
+  checksums = [file[FILE_CHECKSUM] for file in files]
+  checksums.sort()
+  h = hashlib.sha1()
+  h.update(''.join(checksums).encode(encoding='utf-8'))
+  return h.hexdigest()
+
+
+def write_record(f, record):
+  if record.__class__.__name__ == 'dict':
+    for k, v in record.items():
+      if k == EXTERNAL_DOCUMENT_REF or k == PACKAGE_EXTERNAL_REF:
+        for ref in v:
+          f.write(ref + '\n')
+      else:
+        f.write('{}: {}\n'.format(k, v))
+  elif record.__class__.__name__ == 'str':
+    f.write(record + '\n')
+  f.write('\n')
+
+
+def write_tagvalue_sbom(all_records):
+  with open(args.output_file, 'w', encoding="utf-8") as output_file:
+    for rec in all_records:
+      write_record(output_file, rec)
+
+
+def write_json_sbom(all_records, product_package_id):
+  doc = {}
+  product_package = None
+  for r in all_records:
+    if r.__class__.__name__ == 'dict':
+      if DOCUMENT_NAME in r:  # Doc header
+        doc['spdxVersion'] = r[SPDX_VERSION]
+        doc['dataLicense'] = r[DATA_LICENSE]
+        doc[SPDXID] = r[SPDXID]
+        doc['name'] = r[DOCUMENT_NAME]
+        doc['documentNamespace'] = r[DOCUMENT_NAMESPACE]
+        doc['creationInfo'] = {
+            'creators': [r[CREATOR]],
+            'created': r[CREATED],
+        }
+        doc['externalDocumentRefs'] = []
+        for ref in r[EXTERNAL_DOCUMENT_REF]:
+          # ref is 'ExternalDocumentRef: <doc id> <doc url> SHA1: xxxxx'
+          fields = ref.split(' ')
+          doc_ref = {
+              'externalDocumentId': fields[1],
+              'spdxDocument': fields[2],
+              'checksum': {
+                  'algorithm': fields[3][:-1],
+                  'checksumValue': fields[4]
+              }
+          }
+          doc['externalDocumentRefs'].append(doc_ref)
+        doc['documentDescribes'] = []
+        doc['packages'] = []
+        doc['files'] = []
+        doc['relationships'] = []
+
+      elif PACKAGE_NAME in r:  # packages
+        package = {
+            'name': r[PACKAGE_NAME],
+            SPDXID: r[SPDXID],
+            'downloadLocation': r[PACKAGE_DOWNLOAD_LOCATION],
+            'filesAnalyzed': r[FILES_ANALYZED] == "true"
+        }
+        if PACKAGE_VERSION in r:
+          package['versionInfo'] = r[PACKAGE_VERSION]
+        if PACKAGE_SUPPLIER in r:
+          package['supplier'] = r[PACKAGE_SUPPLIER]
+        if PACKAGE_VERIFICATION_CODE in r:
+          package['packageVerificationCode'] = {
+              'packageVerificationCodeValue': r[PACKAGE_VERIFICATION_CODE]
+          }
+        if PACKAGE_EXTERNAL_REF in r:
+          package['externalRefs'] = []
+          for ref in r[PACKAGE_EXTERNAL_REF]:
+            # ref is 'ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4'
+            fields = ref.split(' ')
+            ext_ref = {
+                'referenceCategory': fields[1],
+                'referenceType': fields[2],
+                'referenceLocator': fields[3],
+            }
+            package['externalRefs'].append(ext_ref)
+
+        doc['packages'].append(package)
+        if r[SPDXID] == product_package_id:
+          product_package = package
+          product_package['hasFiles'] = []
+
+      elif FILE_NAME in r:  # files
+        file = {
+            'fileName': r[FILE_NAME],
+            SPDXID: r[SPDXID]
+        }
+        checksum = r[FILE_CHECKSUM].split(': ')
+        file['checksums'] = [{
+            'algorithm': checksum[0],
+            'checksumValue': checksum[1],
+        }]
+        doc['files'].append(file)
+        product_package['hasFiles'].append(r[SPDXID])
+
+    elif r.__class__.__name__ == 'str':
+      if r.startswith(RELATIONSHIP):
+        # r is 'Relationship: <spdxid> <relationship> <spdxid>'
+        fields = r.split(' ')
+        rel = {
+            'spdxElementId': fields[1],
+            'relatedSpdxElement': fields[3],
+            'relationshipType': fields[2],
+        }
+        if fields[2] == REL_DESCRIBES:
+          doc['documentDescribes'].append(fields[3])
+        else:
+          doc['relationships'].append(rel)
+
+  with open(args.output_file + '.json', 'w', encoding="utf-8") as output_file:
+    output_file.write(json.dumps(doc, indent=4))
+
+
+def save_report(report):
+  prefix, _ = os.path.splitext(args.output_file)
+  with open(prefix + '-gen-report.txt', 'w', encoding='utf-8') as report_file:
+    for type, issues in report.items():
+      report_file.write(type + '\n')
+      for issue in issues:
+        report_file.write('\t' + issue + '\n')
+      report_file.write('\n')
+
+
+def sort_rels(rel):
+  # rel = 'Relationship file_id GENERATED_FROM package_id'
+  fields = rel.split(' ')
+  return fields[3] + fields[1]
+
+
+# Validate the metadata generated by Make for installed files and report if there is no metadata.
+def installed_file_has_metadata(installed_file_metadata, report):
+  installed_file = installed_file_metadata['installed_file']
+  module_path = installed_file_metadata['module_path']
+  product_copy_files = installed_file_metadata['product_copy_files']
+  kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+  is_platform_generated = installed_file_metadata['is_platform_generated']
+
+  if (not module_path and
+      not product_copy_files and
+      not kernel_module_copy_files and
+      not is_platform_generated and
+      not installed_file.endswith('.fsv_meta')):
+    report[ISSUE_NO_METADATA].append(installed_file)
+    return False
+
+  return True
+
+
+def report_metadata_file(metadata_file_path, installed_file_metadata, report):
+  if metadata_file_path:
+    report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
+        'installed_file: {}, module_path: {}, METADATA file: {}'.format(
+            installed_file_metadata['installed_file'],
+            installed_file_metadata['module_path'],
+            metadata_file_path + '/METADATA'))
+
+    package_metadata = metadata_file_pb2.Metadata()
+    with open(metadata_file_path + '/METADATA', 'rt') as f:
+      text_format.Parse(f.read(), package_metadata)
+
+    if not metadata_file_path in metadata_file_protos:
+      metadata_file_protos[metadata_file_path] = package_metadata
+      if not package_metadata.name:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(f'{metadata_file_path}/METADATA does not have "name"')
+
+      if not package_metadata.third_party.version:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(
+            f'{metadata_file_path}/METADATA does not have "third_party.version"')
+
+      for tag in package_metadata.third_party.security.tag:
+        if not tag.startswith(NVD_CPE23):
+          report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
+              f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
+  else:
+    report[ISSUE_NO_METADATA_FILE].append(
+        "installed_file: {}, module_path: {}".format(
+            installed_file_metadata['installed_file'], installed_file_metadata['module_path']))
+
+
+def generate_fragment():
+  with open(args.metadata, newline='') as sbom_metadata_file:
+    reader = csv.DictReader(sbom_metadata_file)
+    for installed_file_metadata in reader:
+      installed_file = installed_file_metadata['installed_file']
+      if args.output_file != args.product_out_dir + installed_file + ".spdx":
+        continue
+
+      module_path = installed_file_metadata['module_path']
+      package_id = new_package_id(encode_for_spdxid(module_path), PKG_PREBUILT)
+      package = new_package_record(package_id, module_path, args.build_version, args.product_mfr)
+      file_id = new_file_id(installed_file)
+      file = new_file_record(file_id, installed_file, checksum(installed_file))
+      relationship = new_relationship_record(file_id, REL_GENERATED_FROM, package_id)
+      records = [package, file, relationship]
+      write_tagvalue_sbom(records)
+      break
+
+
+def main():
+  global args
+  args = get_args()
+  log('Args:', vars(args))
+
+  if args.unbundled:
+    generate_fragment()
+    return
+
+  global metadata_file_protos
+  metadata_file_protos = {}
+
+  doc_id = 'SPDXRef-DOCUMENT'
+  doc_header = new_doc_header(doc_id)
+
+  product_package_id = 'SPDXRef-PRODUCT'
+  product_package = new_package_record(product_package_id, 'PRODUCT', args.build_version, args.product_mfr,
+                                       files_analyzed='true')
+
+  platform_package_id = 'SPDXRef-PLATFORM'
+  platform_package = new_package_record(platform_package_id, 'PLATFORM', args.build_version, args.product_mfr)
+
+  # Report on some issues and information
+  report = {
+      ISSUE_NO_METADATA: [],
+      ISSUE_NO_METADATA_FILE: [],
+      ISSUE_METADATA_FILE_INCOMPLETE: [],
+      ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
+      INFO_METADATA_FOUND_FOR_PACKAGE: []
+  }
+
+  # Scan the metadata in CSV file and create the corresponding package and file records in SPDX
+  product_files = []
+  package_ids = []
+  package_records = []
+  rels_file_gen_from = []
+  with open(args.metadata, newline='') as sbom_metadata_file:
+    reader = csv.DictReader(sbom_metadata_file)
+    for installed_file_metadata in reader:
+      installed_file = installed_file_metadata['installed_file']
+      module_path = installed_file_metadata['module_path']
+      product_copy_files = installed_file_metadata['product_copy_files']
+      kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+
+      if not installed_file_has_metadata(installed_file_metadata, report):
+        continue
+
+      file_id = new_file_id(installed_file)
+      product_files.append(new_file_record(file_id, installed_file, checksum(installed_file)))
+
+      if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
+        metadata_file_path = get_metadata_file_path(installed_file_metadata)
+        report_metadata_file(metadata_file_path, installed_file_metadata, report)
+
+        # File from source fork packages or prebuilt fork packages
+        external_doc_ref, pkgs, rels = get_sbom_fragments(installed_file_metadata, metadata_file_path)
+        if len(pkgs) > 0:
+          if external_doc_ref and external_doc_ref not in doc_header[EXTERNAL_DOCUMENT_REF]:
+            doc_header[EXTERNAL_DOCUMENT_REF].append(external_doc_ref)
+          for p in pkgs:
+            if not p[SPDXID] in package_ids:
+              package_ids.append(p[SPDXID])
+              package_records.append(p)
+          for rel in rels:
+            if not rel in package_records:
+              package_records.append(rel)
+          fork_package_id = pkgs[0][SPDXID]  # The first package should be the source/prebuilt fork package
+          rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, fork_package_id))
+      elif module_path or installed_file_metadata['is_platform_generated']:
+        # File from PLATFORM package
+        rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
+      elif product_copy_files:
+        # Format of product_copy_files: <source path>:<dest path>
+        src_path = product_copy_files.split(':')[0]
+        # So far product_copy_files are copied from the system, kernel, hardware, frameworks and device
+        # directories, so treat them as files from the PLATFORM package
+        rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
+      elif installed_file.endswith('.fsv_meta'):
+        # See build/make/core/Makefile:2988
+        rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
+      elif kernel_module_copy_files.startswith('ANDROID-GEN'):
+        # For the four files generated for _dlkm, _ramdisk partitions
+        # See build/make/core/Makefile:323
+        rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
+
+  product_package[PACKAGE_VERIFICATION_CODE] = generate_package_verification_code(product_files)
+
+  all_records = [
+      doc_header,
+      product_package,
+      new_relationship_record(doc_id, REL_DESCRIBES, product_package_id),
+  ]
+  all_records += product_files
+  all_records.append(platform_package)
+  all_records += package_records
+  rels_file_gen_from.sort(key=sort_rels)
+  all_records += rels_file_gen_from
+
+  # Save SBOM records to output file
+  doc_header[CREATED] = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+  write_tagvalue_sbom(all_records)
+  if args.json:
+    write_json_sbom(all_records, product_package_id)
+
+  save_report(report)
+
+
+if __name__ == '__main__':
+  main()
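
For reference, generate-sbom.py reads sbom-metadata.csv through csv.DictReader, keying on the
installed_file, module_path, soong_module_type, product_copy_files, kernel_module_copy_files,
is_platform_generated and is_prebuilt_make_module columns used above. Below is a minimal sketch of
driving the tool by hand; the module name, paths and fingerprint are made-up placeholders, the
installed file must already exist under --product_out_dir (checksum() hashes it), and the generated
metadata_file_pb2 module must be importable.

# sbom_sketch.py - illustrative only, not part of the build.
import csv
import subprocess

row = {
    'installed_file': '/system/bin/example',   # placeholder; must exist under product_out_dir
    'module_path': 'external/examplelib',      # placeholder module path
    'soong_module_type': 'cc_binary',
    'product_copy_files': '',
    'kernel_module_copy_files': '',
    'is_platform_generated': '',
    'is_prebuilt_make_module': '',
}
with open('sbom-metadata.csv', 'w', newline='') as f:
    writer = csv.DictWriter(f, fieldnames=row.keys())
    writer.writeheader()
    writer.writerow(row)

subprocess.run(['tools/generate-sbom.py',
                '--output_file', 'out/sbom.spdx',
                '--metadata', 'sbom-metadata.csv',
                '--product_out_dir', 'out/target/product/example',
                '--build_version', 'example-build-fingerprint',
                '--product_mfr', 'Google'], check=True)
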
diff --git a/tools/generate_gts_shared_report.py b/tools/generate_gts_shared_report.py
new file mode 100644
index 0000000..11c9364
--- /dev/null
+++ b/tools/generate_gts_shared_report.py
@@ -0,0 +1,128 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2022 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""
+Checks and generates a report for gts modules that should be open-sourced.
+
+Usage:
+  generate_gts_open_source_report.py
+    --gtsv-metalic [gts-verifier meta_lic]
+    --gts-test-metalic [android-gts meta_lic]
+    --checkshare [COMPLIANCE_CHECKSHARE]
+    --gts-test-dir [directory of android-gts]
+    --output [output file]
+
+Output example:
+  GTS-Verifier: PASS/FAIL
+  GTS-Modules: PASS/FAIL
+    GtsIncrementalInstallTestCases_BackgroundProcess
+    GtsUnsignedNetworkStackTestCases
+"""
+import sys
+import argparse
+import subprocess
+import re
+
+def _get_args():
+    """Parses input arguments."""
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '--gtsv-metalic', required=True,
+        help='license meta_lic file path of gts-verifier.zip')
+    parser.add_argument(
+        '--gts-test-metalic', required=True,
+        help='license meta_lic file path of android-gts.zip')
+    parser.add_argument(
+        '--checkshare', required=True,
+        help='path of the COMPLIANCE_CHECKSHARE tool')
+    parser.add_argument(
+        '--gts-test-dir', required=True,
+        help='directory of android-gts')
+    parser.add_argument(
+        '-o', '--output', required=True,
+        help='file path of the output report')
+    return parser.parse_args()
+
+def _check_gtsv(checkshare: str, gtsv_metalic: str) -> str:
+    """Checks gts-verifier license.
+
+    Args:
+      checkshare: path of the COMPLIANCE_CHECKSHARE tool
+      gtsv_metalic: license meta_lic file path of gts-verifier.zip
+
+    Returns:
+      PASS when gts-verifier.zip doesn't need to be shared, and FAIL
+      when gts-verifier.zip needs to be shared.
+    """
+    cmd = f'{checkshare} {gtsv_metalic}'
+    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+    proc.communicate()
+    return 'PASS' if proc.returncode == 0 else 'FAIL'
+
+def _check_gts_test(checkshare: str, gts_test_metalic: str,
+                    gts_test_dir: str) -> tuple[str, set[str]]:
+    """Checks android-gts license.
+
+    Args:
+      checkshare: path of the COMPLIANCE_CHECKSHARE tool
+      gts_test_metalic: license meta_lic file path of android-gts.zip
+      gts_test_dir: directory of android-gts
+
+    Returns:
+      Check result (PASS when android-gts doesn't need to be shared,
+      FAIL when some gts modules need to be shared) and gts modules
+      that need to be shared.
+    """
+    cmd = f'{checkshare} {gts_test_metalic}'
+    proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
+                            stderr=subprocess.PIPE)
+    _, str_stderr = map(lambda b: b.decode(), proc.communicate())
+    if proc.returncode == 0:
+        return 'PASS', set()
+    open_source_modules = set()
+    for error_line in str_stderr.split('\n'):
+        # Skip the empty lines
+        if not error_line:
+            continue
+        module_meta_lic = error_line.strip().split()[0]
+        groups = re.fullmatch(
+            re.compile(f'.*/{gts_test_dir}/(.*)'), module_meta_lic)
+        if groups:
+            open_source_modules.add(
+                groups[1].removesuffix('.meta_lic'))
+    return 'FAIL', open_source_modules
+
+
+def main(argv):
+    args = _get_args()
+
+    gtsv_metalic = args.gtsv_metalic
+    gts_test_metalic = args.gts_test_metalic
+    output_file = args.output
+    checkshare = args.checkshare
+    gts_test_dir = args.gts_test_dir
+
+    with open(output_file, 'w') as file:
+        result = _check_gtsv(checkshare, gtsv_metalic)
+        file.write(f'GTS-Verifier: {result}\n')
+        result, open_source_modules = _check_gts_test(
+            checkshare, gts_test_metalic, gts_test_dir)
+        file.write(f'GTS-Modules: {result}\n')
+        for open_source_module in open_source_modules:
+            file.write(f'\t{open_source_module}\n')
+
+if __name__ == "__main__":
+    main(sys.argv)
\ No newline at end of file
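
The module extraction in _check_gts_test() takes the first token of each non-empty stderr line from
the checkshare tool and keeps the path component after the android-gts directory, minus the
.meta_lic suffix. A standalone sketch of that parse follows; the stderr line is a made-up example,
not actual COMPLIANCE_CHECKSHARE output.

import re

gts_test_dir = 'android-gts'
# Hypothetical error line; the real checkshare wording may differ.
error_line = 'out/gts/android-gts/GtsExampleTestCases.meta_lic requires sharing'

module_meta_lic = error_line.strip().split()[0]
groups = re.fullmatch(f'.*/{gts_test_dir}/(.*)', module_meta_lic)
if groups:
    print(groups[1].removesuffix('.meta_lic'))  # GtsExampleTestCases
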
diff --git a/tools/protos/Android.bp b/tools/protos/Android.bp
new file mode 100644
index 0000000..c6ad19e
--- /dev/null
+++ b/tools/protos/Android.bp
@@ -0,0 +1,32 @@
+// Copyright 2023 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_library_host {
+    name: "metadata_file_proto_py",
+    version: {
+        py3: {
+            enabled: true,
+        },
+    },
+    srcs: [
+        "metadata_file.proto",
+    ],
+    proto: {
+        canonical_path_from_root: false,
+    },
+}
diff --git a/tools/protos/metadata_file.proto b/tools/protos/metadata_file.proto
new file mode 100644
index 0000000..ac1129a
--- /dev/null
+++ b/tools/protos/metadata_file.proto
@@ -0,0 +1,281 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto2";
+
+package metadata_file;
+
+// Proto definition of METADATA files of packages in AOSP codebase.
+message Metadata {
+  // Name of the package.
+  optional string name = 1;
+
+  // A short description (a few lines) of the package.
+  // Example: "Handles location lookups, throttling, batching, etc."
+  optional string description = 2;
+
+  // Specifies additional data about third-party packages.
+  optional ThirdParty third_party = 3;
+}
+
+message ThirdParty {
+  // URL(s) associated with the package.
+  //
+  // At a minimum, all packages must specify a URL which identifies where they
+  // came from, containing a type of: ARCHIVE, GIT or OTHER. Typically,
+  // a package should contain only a single URL from these types.  Occasionally,
+  // a package may be broken across multiple archive files for whatever reason,
+  // in which case having multiple ARCHIVE URLs is okay.  However, this should
+  // not be used to combine different logical packages that are versioned and
+  // possibly licensed differently.
+  repeated URL url = 1;
+
+  // The package version.  In order of preference, this should contain:
+  //  - If the package comes from Git or another source control system,
+  //    a specific tag or revision in source control, such as "r123" or
+  //    "58e27d2".  This MUST NOT be a mutable ref such as a branch name.
+  //  - a released package version such as "1.0", "2.3-beta", etc.
+  //  - the date the package was retrieved, formatted as "As of YYYY-MM-DD".
+  optional string version = 2;
+
+  // The date of the change in which the package was last upgraded from
+  // upstream.
+  // This should only identify package upgrades from upstream, not local
+  // modifications. This may identify the date of either the original or
+  // merged change.
+  //
+  // Note: this is NOT the date that this version of the package was released
+  // externally.
+  optional Date last_upgrade_date = 3;
+
+  // License type that identifies how the package may be used.
+  optional LicenseType license_type = 4;
+
+  // An additional note explaining the licensing of this package.  This is most
+  // commonly used with commercial license.
+  optional string license_note = 5;
+
+  // Description of local changes that have been made to the package.  This does
+  // not need to (and in most cases should not) attempt to include an exhaustive
+  // list of all changes, but may instead direct readers to review the local
+  // commit history, a collection of patch files, a separate README.md (or
+  // similar) document, etc.
+  // Note: Use of this field to store IDs of advisories fixed with a backported
+  // patch is deprecated, use "security.mitigated_security_patch" instead.
+  optional string local_modifications = 6;
+
+  // Security related metadata including risk category and any special
+  // instructions for using the package, as determined by an ISE-TPS review.
+  optional Security security = 7;
+
+  // The type of directory this metadata represents.
+  optional DirectoryType type = 8 [default = PACKAGE];
+
+  // The homepage for the package. This will eventually replace
+  // `url { type: HOMEPAGE }`
+  optional string homepage = 9;
+
+  // SBOM information of the package. It is mandatory for prebuilt packages.
+  oneof sbom {
+    // Reference to external SBOM document provided as URL.
+    SBOMRef sbom_ref = 10;
+  }
+
+}
+
+// URL associated with a third-party package.
+message URL {
+  enum Type {
+    // The homepage for the package. For example, "https://bazel.io/". This URL
+    // is optional, but encouraged to help disambiguate similarly named packages
+    // or to get more information about the package. This is especially helpful
+    // when no other URLs provide human readable resources (such as git:// or
+    // sso:// URLs).
+    HOMEPAGE = 1;
+
+    // The URL of the archive containing the source code for the package, for
+    // example a zip or tgz file.
+    ARCHIVE = 2;
+
+    // The URL of the upstream git repository this package is retrieved from.
+    // For example:
+    //  - https://github.com/git/git.git
+    //  - git://git.kernel.org/pub/scm/git/git.git
+    //
+    // Use of a git URL requires that the package "version" value must specify a
+    // specific git tag or revision.
+    GIT = 3;
+
+    // The URL of the upstream SVN repository this package is retrieved from.
+    // For example:
+    //  - http://llvm.org/svn/llvm-project/llvm/
+    //
+    // Use of an SVN URL requires that the package "version" value must specify
+    // a specific SVN tag or revision.
+    SVN = 4;
+
+    // The URL of the upstream mercurial repository this package is retrieved
+    // from. For example:
+    //   - https://mercurial-scm.org/repo/evolve
+    //
+    // Use of a mercurial URL requires that the package "version" value must
+    // specify a specific tag or revision.
+    HG = 5;
+
+    // The URL of the upstream darcs repository this package is retrieved
+    // from. For example:
+    //   - https://hub.darcs.net/hu.dwim/hu.dwim.util
+    //
+    // Use of a DARCS URL requires that the package "version" value must
+    // specify a specific tag or revision.
+    DARCS = 6;
+
+    PIPER = 7;
+
+    // A URL that does not fit any other type. This may also indicate that the
+    // source code was received via email or some other out-of-band way. This is
+    // most commonly used with commercial software received directly from the
+    // vendor. In the case of email, the URL value can be used to provide
+    // additional information about how it was received.
+    OTHER = 8;
+
+    // The URL identifying where the local copy of the package source code can
+    // be found.
+    //
+    // Typically, the metadata files describing a package reside in the same
+    // directory as the source code for the package. In a few rare cases where
+    // they are separate, the LOCAL_SOURCE URL identifies where to find the
+    // source code. This only describes where to find the local copy of the
+    // source; there should always be an additional URL describing where the
+    // package was retrieved from.
+    //
+    // Examples:
+    //  - https://android.googlesource.com/platform/external/apache-http/
+    LOCAL_SOURCE = 9;
+  }
+
+  // The type of resource this URL identifies.
+  optional Type type = 1;
+
+  // The actual URL value.  URLs should be absolute and start with 'http://' or
+  // 'https://' (or occasionally 'git://' or 'ftp://' where appropriate).
+  optional string value = 2;
+}
+
+// License type that identifies how the packages may be used.
+enum LicenseType {
+  BY_EXCEPTION_ONLY = 1;
+  NOTICE = 2;
+  PERMISSIVE = 3;
+  RECIPROCAL = 4;
+  RESTRICTED_IF_STATICALLY_LINKED = 5;
+  RESTRICTED = 6;
+  UNENCUMBERED = 7;
+}
+
+// Identifies security related metadata including risk category and any special
+// instructions for using the package.
+message Security {
+  // Security risk category for a package, as determined by an ISE-TPS review.
+  enum Category {
+    CATEGORY_UNSPECIFIED = 0;
+
+    // Package should only be used in a sandboxed environment.
+    // Package should have restricted visibility.
+    SANDBOXED_ONLY = 1;
+
+    // Package should not be used to process user content. It is considered
+    // safe to use to process trusted data only. Package should have restricted
+    // visibility.
+    TRUSTED_DATA_ONLY = 2;
+
+    // Package is considered safe to use.
+    REVIEWED_AND_SECURE = 3;
+  }
+
+  // Identifies the security risk category for the package.  This will be
+  // provided by the ISE-TPS team as the result of a security review of the
+  // package.
+  optional Category category = 1;
+
+  // An additional security note for the package.
+  optional string note = 2;
+
+  // Text tag to categorize the package. It's currently used by security to:
+  // - to disable OSV (https://osv.dev)
+  // support via the `OSV:disable` tag
+  // - to attach CPE to their corresponding packages, for vulnerability
+  // monitoring:
+  //
+  // Please do document your usecase here should you want to add one.
+  repeated string tag = 3;
+
+  // ID of advisories fixed with a mitigated patch, for example CVE-2018-1111.
+  repeated string mitigated_security_patch = 4;
+}
+
+enum DirectoryType {
+  UNDEFINED = 0;
+
+  // This directory represents a package.
+  PACKAGE = 1;
+
+  // This directory is designed to organize multiple third-party PACKAGE
+  // directories.
+  GROUP = 2;
+
+  // This directory contains several PACKAGE directories representing
+  // different versions of the same third-party project.
+  VERSIONS = 3;
+}
+
+// Represents a whole or partial calendar date, such as a birthday. The time of
+// day and time zone are either specified elsewhere or are insignificant. The
+// date is relative to the Gregorian Calendar. This can represent one of the
+// following:
+//
+// * A full date, with non-zero year, month, and day values.
+// * A month and day, with a zero year (for example, an anniversary).
+// * A year on its own, with a zero month and a zero day.
+// * A year and month, with a zero day (for example, a credit card expiration
+//   date).
+message Date {
+  // Year of the date. Must be from 1 to 9999, or 0 to specify a date without
+  // a year.
+  optional int32 year = 1;
+  // Month of a year. Must be from 1 to 12, or 0 to specify a year without a
+  // month and day.
+  optional int32 month = 2;
+  // Day of a month. Must be from 1 to 31 and valid for the year and month, or 0
+  // to specify a year by itself or a year and month where the day isn't
+  // significant.
+  optional int32 day = 3;
+}
+
+// Reference to external SBOM document and element corresponding to the package.
+// See https://spdx.github.io/spdx-spec/v2.3/document-creation-information/#66-external-document-references-field
+message SBOMRef {
+  // The URL that points to the SBOM document of the upstream package of this
+  // third_party package.
+  optional string url = 1;
+  // Checksum of the SBOM document the url field points to.
+  // Format: e.g. SHA1:<checksum>, or any algorithm defined in
+  // https://spdx.github.io/spdx-spec/v2.3/file-information/#8.4
+  optional string checksum = 2;
+  // SPDXID of the upstream package/file defined in the SBOM document the url field points to.
+  // Format: SPDXRef-[a-zA-Z0-9.-]+, see
+  // https://spdx.github.io/spdx-spec/v2.3/package-information/#72-package-spdx-identifier-field or
+  // https://spdx.github.io/spdx-spec/v2.3/file-information/#82-file-spdx-identifier-field
+  optional string element_id = 3;
+}
\ No newline at end of file
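
generate-sbom.py parses each package's METADATA file into the Metadata message above with
text_format.Parse. Below is a minimal sketch of a text proto exercising the fields the SBOM
generator reads (name, third_party.version, url, security.tag and sbom_ref); all values are
illustrative placeholders, and the snippet assumes the generated metadata_file_pb2 module (see
tools/protos/Android.bp) is importable.

import google.protobuf.text_format as text_format
import metadata_file_pb2

EXAMPLE_METADATA = '''
name: "examplelib"
description: "Illustrative third-party package."
third_party {
  url { type: GIT value: "https://example.org/examplelib.git" }
  version: "v1.2.3"
  license_type: NOTICE
  security { tag: "NVD-CPE2.3:cpe:2.3:a:example:examplelib:1.2.3:*:*:*:*:*:*:*" }
  sbom_ref {
    url: "https://example.org/examplelib.spdx"
    checksum: "SHA1:0000000000000000000000000000000000000000"
    element_id: "SPDXRef-examplelib"
  }
}
'''

metadata = metadata_file_pb2.Metadata()
text_format.Parse(EXAMPLE_METADATA, metadata)
print(metadata.third_party.version)             # v1.2.3
print(metadata.third_party.WhichOneof('sbom'))  # sbom_ref
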
diff --git a/tools/rbcrun/Android.bp b/tools/rbcrun/Android.bp
index 90173ac..fcc33ef 100644
--- a/tools/rbcrun/Android.bp
+++ b/tools/rbcrun/Android.bp
@@ -19,7 +19,7 @@
 
 blueprint_go_binary {
     name: "rbcrun",
-    srcs: ["cmd/rbcrun.go"],
+    srcs: ["rbcrun/rbcrun.go"],
     deps: ["rbcrun-module"],
 }
 
diff --git a/tools/rbcrun/go.mod b/tools/rbcrun/go.mod
index a029eb4..5ae2972 100644
--- a/tools/rbcrun/go.mod
+++ b/tools/rbcrun/go.mod
@@ -1,9 +1,6 @@
 module rbcrun
 
-require (
-	github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d // indirect
-	go.starlark.net v0.0.0-20201006213952-227f4aabceb5
-)
+require go.starlark.net v0.0.0-20201006213952-227f4aabceb5
 
 replace go.starlark.net => ../../../../external/starlark-go
 
diff --git a/tools/rbcrun/go.sum b/tools/rbcrun/go.sum
index db4d51e..10761a8 100644
--- a/tools/rbcrun/go.sum
+++ b/tools/rbcrun/go.sum
@@ -1,11 +1,8 @@
 cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
-github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=
 github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
-github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8=
 github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
-github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1 h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8=
 github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
 github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4=
@@ -26,8 +23,6 @@
 github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
 github.com/google/go-cmp v0.5.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
-github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d h1:AREM5mwr4u1ORQBMvzfzBgpsctsbQikCVpvC+tX285E=
-github.com/nbutton23/zxcvbn-go v0.0.0-20180912185939-ae427f1e4c1d/go.mod h1:o96djdrsSGy3AWPyBgZMAGfxZNfgntdJG+11KU4QvbU=
 github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA=
 golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
 golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
@@ -44,9 +39,6 @@
 golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
 golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
 golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
-golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae h1:Ih9Yo4hSPImZOpfGuA4bR/ORKTAbhZo2AbWNRCnevdo=
-golang.org/x/sys v0.0.0-20200625212154-ddb9806d33ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
-golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f h1:+Nyd8tzPX9R7BWHguqsrbFdRx3WQ/1ib8I44HXV5yTA=
 golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
 golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
 golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
diff --git a/tools/rbcrun/cmd/rbcrun.go b/tools/rbcrun/rbcrun/rbcrun.go
similarity index 100%
rename from tools/rbcrun/cmd/rbcrun.go
rename to tools/rbcrun/rbcrun/rbcrun.go
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index 29fc771..a76dc8a 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -334,6 +334,9 @@
         "ota_utils.py",
         "payload_signer.py",
     ],
+    libs: [
+        "releasetools_common",
+    ],
 }
 
 python_binary_host {
@@ -356,6 +359,21 @@
 }
 
 python_binary_host {
+    name: "create_brick_ota",
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    srcs: [
+        "create_brick_ota.py",
+    ],
+    libs: [
+        "ota_utils_lib",
+    ],
+}
+
+python_binary_host {
     name: "build_image",
     defaults: [
         "releasetools_binary_defaults",
@@ -598,6 +616,7 @@
         "testdata/**/*",
         ":com.android.apex.compressed.v1",
         ":com.android.apex.compressed.v1_original",
+        ":com.android.apex.vendor.foo.with_vintf"
     ],
     target: {
         darwin: {
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index b45b0a3..e154a0f 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -1001,7 +1001,8 @@
   if has_init_boot:
     banner("init_boot")
     init_boot_image = common.GetBootableImage(
-        "IMAGES/init_boot.img", "init_boot.img", OPTIONS.input_tmp, "INIT_BOOT")
+        "IMAGES/init_boot.img", "init_boot.img", OPTIONS.input_tmp, "INIT_BOOT",
+        dev_nodes=True)
     if init_boot_image:
       partitions['init_boot'] = os.path.join(
           OPTIONS.input_tmp, "IMAGES", "init_boot.img")
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 194ff58..40f7c92 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -66,7 +66,7 @@
     self.fsckerofs_path = os.path.join(
         OPTIONS.search_path, "bin", "fsck.erofs")
     self.blkid_path = os.path.join(
-        OPTIONS.search_path, "bin", "blkid")
+        OPTIONS.search_path, "bin", "blkid_static")
     self.avbtool = avbtool if avbtool else "avbtool"
     self.sign_tool = sign_tool
 
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 9919029..7805599 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -302,6 +302,8 @@
   Raises:
     ExternalError: On non-zero exit from the command.
   """
+  if verbose is None:
+    verbose = OPTIONS.verbose
   proc = Run(args, verbose=verbose, **kwargs)
   output, _ = proc.communicate()
   if output is None:
@@ -1545,14 +1547,20 @@
 
 
 def _MakeRamdisk(sourcedir, fs_config_file=None,
+                 dev_node_file=None,
                  ramdisk_format=RamdiskFormat.GZ):
   ramdisk_img = tempfile.NamedTemporaryFile()
 
-  if fs_config_file is not None and os.access(fs_config_file, os.F_OK):
-    cmd = ["mkbootfs", "-f", fs_config_file,
-           os.path.join(sourcedir, "RAMDISK")]
-  else:
-    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
+  cmd = ["mkbootfs"]
+
+  if fs_config_file and os.access(fs_config_file, os.F_OK):
+    cmd.extend(["-f", fs_config_file])
+
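+  # Pass the optional device-node list (see META/ramdisk_node_list in the
+  # target files) through to mkbootfs when one is provided.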
+  if dev_node_file and os.access(dev_node_file, os.F_OK):
+    cmd.extend(["-n", dev_node_file])
+
+  cmd.append(os.path.join(sourcedir, "RAMDISK"))
+
   p1 = Run(cmd, stdout=subprocess.PIPE)
   if ramdisk_format == RamdiskFormat.LZ4:
     p2 = Run(["lz4", "-l", "-12", "--favor-decSpeed"], stdin=p1.stdout,
@@ -1570,7 +1578,8 @@
   return ramdisk_img
 
 
-def _BuildBootableImage(image_name, sourcedir, fs_config_file, info_dict=None,
+def _BuildBootableImage(image_name, sourcedir, fs_config_file,
+                        dev_node_file=None, info_dict=None,
                         has_ramdisk=False, two_step_image=False):
   """Build a bootable image from the specified sourcedir.
 
@@ -1612,7 +1621,7 @@
 
   if has_ramdisk:
     ramdisk_format = GetRamdiskFormat(info_dict)
-    ramdisk_img = _MakeRamdisk(sourcedir, fs_config_file,
+    ramdisk_img = _MakeRamdisk(sourcedir, fs_config_file, dev_node_file,
                                ramdisk_format=ramdisk_format)
 
   # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
@@ -1820,7 +1829,8 @@
 
 
 def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
-                     info_dict=None, two_step_image=False):
+                     info_dict=None, two_step_image=False,
+                     dev_nodes=False):
   """Return a File object with the desired bootable image.
 
   Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name 'prebuilt_name',
@@ -1856,6 +1866,8 @@
   fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
   data = _BuildBootableImage(prebuilt_name, os.path.join(unpack_dir, tree_subdir),
                              os.path.join(unpack_dir, fs_config),
+                             os.path.join(unpack_dir, 'META/ramdisk_node_list')
+                                if dev_nodes else None,
                              info_dict, has_ramdisk, two_step_image)
   if data:
     return File(name, data)
@@ -2109,9 +2121,7 @@
   if info_dict is None:
     info_dict = LoadInfoDict(input_zip)
 
-  is_sparse = info_dict.get("extfs_sparse_flag")
-  if info_dict.get(which + "_disable_sparse"):
-    is_sparse = False
+  is_sparse = IsSparseImage(os.path.join(tmpdir, "IMAGES", which + ".img"))
 
   # When target uses 'BOARD_EXT4_SHARE_DUP_BLOCKS := true', images may contain
   # shared blocks (i.e. some blocks will show up in multiple files' block
@@ -2893,13 +2903,12 @@
 
     fd, new_zipfile = tempfile.mkstemp(dir=os.path.dirname(zip_filename))
     os.close(fd)
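+    # Copy the archive with zip2zip, excluding the entries to delete, rather
+    # than rewriting every remaining entry from Python.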
+    cmd = ["zip2zip", "-i", zip_filename, "-o", new_zipfile]
+    for entry in entries:
+      cmd.append("-x")
+      cmd.append(entry)
+    RunAndCheckOutput(cmd)
 
-    with zipfile.ZipFile(new_zipfile, 'w') as zout:
-      for item in zin.infolist():
-        if item.filename in entries:
-          continue
-        buffer = zin.read(item.filename)
-        zout.writestr(item, buffer)
 
   os.replace(new_zipfile, zip_filename)
 
diff --git a/tools/releasetools/create_brick_ota.py b/tools/releasetools/create_brick_ota.py
new file mode 100644
index 0000000..44f0a95
--- /dev/null
+++ b/tools/releasetools/create_brick_ota.py
@@ -0,0 +1,92 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import argparse
+from pathlib import Path
+import zipfile
+from typing import List
+import common
+import tempfile
+import shutil
+
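+# Partitions wiped by default; callers can extend this list with
+# --extra_wipe_partitions.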
+PARTITIONS_TO_WIPE = ["/dev/block/by-name/vbmeta",
+                      "/dev/block/by-name/vbmeta_a",
+                      "/dev/block/by-name/vbmeta_b",
+                      "/dev/block/by-name/vbmeta_system_a",
+                      "/dev/block/by-name/vbmeta_system_b",
+                      "/dev/block/by-name/boot",
+                      "/dev/block/by-name/boot_a",
+                      "/dev/block/by-name/boot_b",
+                      "/dev/block/by-name/vendor_boot",
+                      "/dev/block/by-name/vendor_boot_a",
+                      "/dev/block/by-name/vendor_boot_b",
+                      "/dev/block/by-name/init_boot_a",
+                      "/dev/block/by-name/init_boot_b",
+                      "/dev/block/by-name/metadata",
+                      "/dev/block/by-name/super",
+                      "/dev/block/by-name/userdata"]
+
+
+def CreateBrickOta(product_name: str, output_path: Path, extra_wipe_partitions: str, serialno: str):
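+  """Build a 'brick' OTA package that tells recovery to wipe the given
+  partitions on the named product, optionally restricted to specific
+  device serial numbers.
+  """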
+  partitions_to_wipe = PARTITIONS_TO_WIPE
+  if extra_wipe_partitions is not None:
+    partitions_to_wipe = PARTITIONS_TO_WIPE + extra_wipe_partitions.split(",")
+  # recovery requires the product name to be a '|'-separated list
+  product_name = product_name.replace(",", "|")
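+  # The package holds just three entries: the wipe list consumed by recovery,
+  # an empty payload.bin, and metadata identifying this as a BRICK OTA.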
+  metadata = ["ota-type=BRICK", "post-timestamp=9999999999",
+              "pre-device=" + product_name]
+  if serialno:
+    metadata.append("serialno=" + serialno)
+  with zipfile.ZipFile(output_path, "w") as zfp:
+    zfp.writestr("recovery.wipe", "\n".join(partitions_to_wipe))
+    zfp.writestr("payload.bin", "")
+    zfp.writestr("META-INF/com/android/metadata", "\n".join(metadata))
+
+
+def main(argv):
+  parser = argparse.ArgumentParser(description='Android Brick OTA generator')
+  parser.add_argument('otafile', metavar='PAYLOAD', type=str,
+                      help='The output OTA package file.')
+  parser.add_argument('--product', type=str,
+                      help='The product name of the device, for example, bramble or redfin. This can be a comma-separated list.', required=True)
+  parser.add_argument('--serialno', type=str,
+                      help='The serial numbers of the devices that are allowed to install this OTA package. This can be a comma-separated list.')
+  parser.add_argument('--extra_wipe_partitions', type=str,
+                      help='Additional partitions on the device that should be wiped.')
+  parser.add_argument('-v', action="store_true",
+                      help="Enable verbose logging", dest="verbose")
+  parser.add_argument('--package_key', type=str,
+                      help='Path to the private key used to sign the payload')
+  parser.add_argument('--search_path', type=str,
+                      help='Search path for framework/signapk.jar')
+  parser.add_argument('--private_key_suffix', type=str,
+                      help='Suffix to be appended to package_key path', default=".pk8")
+  args = parser.parse_args(argv[1:])
+  if args.search_path:
+    common.OPTIONS.search_path = args.search_path
+  if args.verbose:
+    common.OPTIONS.verbose = args.verbose
+  CreateBrickOta(args.product, args.otafile,
+                 args.extra_wipe_partitions, args.serialno)
+  if args.package_key:
+    common.OPTIONS.private_key_suffix = args.private_key_suffix
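+    # Sign in place: write the signed package to a temporary file, then copy
+    # it back over the output path.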
+    with tempfile.NamedTemporaryFile() as tmpfile:
+      common.SignFile(args.otafile, tmpfile.name,
+                      args.package_key, None, whole_file=True)
+      shutil.copy(tmpfile.name, args.otafile)
+
+
+if __name__ == "__main__":
+  import sys
+  main(sys.argv)
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index c95cead..54122b0 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -156,6 +156,15 @@
     shutil.move(source, destination)
 
 
+def remove_file_if_exists(file_name):
+  """Remove the file if it exists and skip otherwise."""
+
+  try:
+    os.remove(file_name)
+  except FileNotFoundError:
+    pass
+
+
 def create_merged_package(temp_dir):
   """Merges two target files packages into one target files structure.
 
@@ -210,8 +219,7 @@
   If odm is present then odm is preferred -- otherwise vendor is used.
   """
   partition = 'vendor'
-  if os.path.exists(os.path.join(target_files_dir, 'ODM')) or os.path.exists(
-      os.path.join(target_files_dir, 'IMAGES/odm.img')):
+  if os.path.exists(os.path.join(target_files_dir, 'ODM')):
     partition = 'odm'
   partition_img = '{}.img'.format(partition)
   partition_map = '{}.map'.format(partition)
@@ -245,7 +253,8 @@
   if not OPTIONS.vendor_otatools:
     # Remove the partition from the merged target-files archive. It will be
     # rebuilt later automatically by generate_missing_images().
-    os.remove(os.path.join(target_files_dir, 'IMAGES', partition_img))
+    remove_file_if_exists(
+        os.path.join(target_files_dir, 'IMAGES', partition_img))
     return
 
   # TODO(b/192253131): Remove the need for vendor_otatools by fixing
@@ -274,7 +283,8 @@
       symlinks=True)
 
   # Delete then rebuild the partition.
-  os.remove(os.path.join(vendor_target_files_dir, 'IMAGES', partition_img))
+  remove_file_if_exists(
+      os.path.join(vendor_target_files_dir, 'IMAGES', partition_img))
   rebuild_partition_command = [
       os.path.join(vendor_otatools_dir, 'bin', 'add_img_to_target_files'),
       '--verbose',
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index ac85aa4..7078d67 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -48,17 +48,12 @@
     # if the filesystem is ext4.
     partition_source_info = source_info["fstab"]["/" + name]
     check_first_block = partition_source_info.fs_type == "ext4"
-    # Disable using imgdiff for squashfs. 'imgdiff -z' expects input files to be
-    # in zip formats. However with squashfs, a) all files are compressed in LZ4;
-    # b) the blocks listed in block map may not contain all the bytes for a
-    # given file (because they're rounded to be 4K-aligned).
-    partition_target_info = target_info["fstab"]["/" + name]
-    disable_imgdiff = (partition_source_info.fs_type == "squashfs" or
-                       partition_target_info.fs_type == "squashfs")
+    # Disable imgdiff: it relies on zlib producing byte-identical output
+    # across zlib versions, which often does not hold.
     return common.BlockDifference(name, partition_tgt, partition_src,
                                   check_first_block,
                                   version=blockimgdiff_version,
-                                  disable_imgdiff=disable_imgdiff)
+                                  disable_imgdiff=True)
 
   if source_zip:
     # See notes in common.GetUserImage()
@@ -282,7 +277,7 @@
   needed_property_files = (
       NonAbOtaPropertyFiles(),
   )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
+  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files, package_key=OPTIONS.package_key)
 
 
 def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
@@ -409,7 +404,7 @@
   if updating_boot:
     boot_type, boot_device_expr = common.GetTypeAndDeviceExpr("/boot",
                                                               source_info)
-    d = common.Difference(target_boot, source_boot)
+    d = common.Difference(target_boot, source_boot, "bsdiff")
     _, _, d = d.ComputePatch()
     if d is None:
       include_full_boot = True
@@ -537,7 +532,7 @@
   needed_property_files = (
       NonAbOtaPropertyFiles(),
   )
-  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files)
+  FinalizeMetadata(metadata, staging_file, output_file, needed_property_files, package_key=OPTIONS.package_key)
 
 
 def GenerateNonAbOtaPackage(target_file, output_file, source_file=None):
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index d6c39c6..043f6ee 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -727,30 +727,34 @@
 
   Returns:
     The filename of a target-files.zip which has renamed the custom images in
-    the IMAGS/ to their partition names.
+    the IMAGES/ to their partition names.
   """
-  # Use zip2zip to avoid extracting the zipfile.
+
+  # First pass: use zip2zip to copy the target files contents, excluding
+  # the "custom" images that will be replaced.
   target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
   cmd = ['zip2zip', '-i', input_file, '-o', target_file]
 
-  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
-    namelist = input_zip.namelist()
-
-  # Write {custom_image}.img as {custom_partition}.img.
+  images = {}
   for custom_partition, custom_image in custom_images.items():
     default_custom_image = '{}.img'.format(custom_partition)
     if default_custom_image != custom_image:
-      logger.info("Update custom partition '%s' with '%s'",
-                  custom_partition, custom_image)
-      # Default custom image need to be deleted first.
-      namelist.remove('IMAGES/{}'.format(default_custom_image))
-      # IMAGES/{custom_image}.img:IMAGES/{custom_partition}.img.
-      cmd.extend(['IMAGES/{}:IMAGES/{}'.format(custom_image,
-                                               default_custom_image)])
+      src = 'IMAGES/' + custom_image
+      dst = 'IMAGES/' + default_custom_image
+      cmd.extend(['-x', dst])
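+      # Remember which source image should be written under this partition
+      # name in the second pass.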
+      images[dst] = src
 
-  cmd.extend(['{}:{}'.format(name, name) for name in namelist])
   common.RunAndCheckOutput(cmd)
 
+  # Second pass: write {custom_image}.img as {custom_partition}.img.
+  with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
+    with zipfile.ZipFile(target_file, 'a', allowZip64=True) as output_zip:
+      for dst, src in images.items():
+        data = input_zip.read(src)
+        logger.info("Update custom partition '%s'", dst)
+        common.ZipWriteStr(output_zip, dst, data)
+      output_zip.close()
+
   return target_file
 
 
@@ -893,6 +897,10 @@
           (source_info is not None and not source_info.is_vabc_xor):
     logger.info("VABC XOR Not supported, disabling")
     OPTIONS.enable_vabc_xor = False
+
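+  # Presumably XOR deltas only pay off when the payload is compressed, so
+  # turn them off when compression is disabled.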
+  if OPTIONS.vabc_compression_param == "none":
+    logger.info("VABC Compression algorithm is set to 'none', disabling VABC xor")
+    OPTIONS.enable_vabc_xor = False
   additional_args = []
 
   # Prepare custom images.
@@ -923,7 +931,7 @@
   # Metadata to comply with Android OTA package format.
   metadata = GetPackageMetadata(target_info, source_info)
   # Generate payload.
-  payload = PayloadGenerator(OPTIONS.include_secondary, OPTIONS.wipe_user_data)
+  payload = PayloadGenerator(wipe_user_data=OPTIONS.wipe_user_data)
 
   partition_timestamps_flags = []
   # Enforce a max timestamp this payload can be applied on top of.
@@ -1341,6 +1349,14 @@
     source_spl = source_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
     target_spl = target_build_prop.GetProp(SECURITY_PATCH_LEVEL_PROP_NAME)
     is_spl_downgrade = target_spl < source_spl
+    if is_spl_downgrade and target_build_prop.GetProp("ro.build.tags") == "release-keys":
+      raise common.ExternalError(
+          "Target security patch level {} is older than source SPL {} "
+          "A locked bootloader will reject SPL downgrade no matter "
+          "what(even if data wipe is done), so SPL downgrade on any "
+          "release-keys build is not allowed.".format(target_spl, source_spl))
+
+    logger.info("SPL downgrade on %s", target_build_prop.GetProp("ro.build.tags"))
     if is_spl_downgrade and not OPTIONS.spl_downgrade and not OPTIONS.downgrade:
       raise common.ExternalError(
           "Target security patch level {} is older than source SPL {} applying "
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index e36a2be..e2ce31d 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -132,8 +132,10 @@
 
   # Re-sign the package after updating the metadata entry.
   if no_signing:
+    logger.info(f"Signing disabled for output file {output_file}")
     shutil.copy(prelim_signing, output_file)
   else:
+    logger.info(f"Signing the output file {output_file} with key {package_key}")
     SignOutput(prelim_signing, output_file, package_key, pw)
 
   # Reopen the final signed zip to double check the streaming metadata.
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 0dd0790..4a12e74 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -532,11 +532,13 @@
       # as a symlink in the current code. So it's a no-op here. Keeping the
       # path here for clarity.
       # Some build props might be stored under path
-      # VENDOR_BOOT/RAMDISK_FRAGMENTS/recovery/RAMDISK/default.prop
-      # so overwrite all files that ends with build.prop or default.prop
+      # VENDOR_BOOT/RAMDISK_FRAGMENTS/recovery/RAMDISK/default.prop, and
+      # default.prop can be a symbolic link to prop.default, so overwrite all
+      # files that end with build.prop, default.prop, or prop.default.
       "RECOVERY/RAMDISK/default.prop") or \
         filename.endswith("build.prop") or \
-        filename.endswith("/default.prop")
+        filename.endswith("/default.prop") or \
+        filename.endswith("/prop.default")
 
 
 def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
diff --git a/tools/releasetools/test_check_target_files_vintf.py b/tools/releasetools/test_check_target_files_vintf.py
index 8725dd6..7c154d7 100644
--- a/tools/releasetools/test_check_target_files_vintf.py
+++ b/tools/releasetools/test_check_target_files_vintf.py
@@ -15,6 +15,7 @@
 #
 
 import os.path
+import shutil
 
 import common
 import test_utils
@@ -86,6 +87,28 @@
 
     return test_dir
 
+  # Prepare test dir with required HAL for APEX testing
+  def prepare_apex_test_dir(self, test_delta_rel_path):
+    test_dir = self.prepare_test_dir(test_delta_rel_path)
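+    # Declare a framework compatibility matrix that requires android.apex.foo,
+    # which must be provided by an APEX (updatable-via-apex).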
+    write_string_to_file(
+        """<compatibility-matrix version="1.0" level="1" type="framework">
+            <hal format="aidl" optional="false" updatable-via-apex="true">
+                <name>android.apex.foo</name>
+                <version>1</version>
+                <interface>
+                    <name>IApex</name>
+                    <instance>default</instance>
+                </interface>
+            </hal>
+            <sepolicy>
+                <sepolicy-version>0.0</sepolicy-version>
+                <kernel-sepolicy-version>0</kernel-sepolicy-version>
+            </sepolicy>
+        </compatibility-matrix>""",
+        os.path.join(test_dir, 'SYSTEM/etc/vintf/compatibility_matrix.1.xml'))
+
+    return test_dir
+
   @test_utils.SkipIfExternalToolsUnavailable()
   def test_CheckVintf_skeleton(self):
     msg = 'vintf check with skeleton target files failed.'
@@ -143,3 +166,25 @@
                          os.path.join(test_dir, 'VENDOR/etc/vintf/manifest.xml'))
     # Should raise an error because a file has invalid format.
     self.assertRaises(common.ExternalError, CheckVintf, test_dir)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_CheckVintf_apex_compat(self):
+    apex_file_name = 'com.android.apex.vendor.foo.with_vintf.apex'
+    msg = 'vintf/apex_compat should be compatible because ' \
+          'APEX %s has the required HALs' % (apex_file_name)
+    test_dir = self.prepare_apex_test_dir('vintf/apex_compat')
+    # Copy APEX under VENDOR/apex
+    apex_file = os.path.join(test_utils.get_current_dir(), apex_file_name)
+    apex_dir = os.path.join(test_dir, 'VENDOR/apex')
+    os.makedirs(apex_dir)
+    shutil.copy(apex_file, apex_dir)
+    # Should find required HAL via APEX
+    self.assertTrue(CheckVintf(test_dir), msg=msg)
+
+  @test_utils.SkipIfExternalToolsUnavailable()
+  def test_CheckVintf_apex_incompat(self):
+    msg = 'vintf/apex_incompat should be incompatible because ' \
+          'no APEX data'
+    test_dir = self.prepare_apex_test_dir('vintf/apex_incompat')
+    # Should not find required HAL
+    self.assertFalse(CheckVintf(test_dir), msg=msg)
diff --git a/tools/signapk/Android.bp b/tools/signapk/Android.bp
index bee6a6f..c4f25f8 100644
--- a/tools/signapk/Android.bp
+++ b/tools/signapk/Android.bp
@@ -31,6 +31,10 @@
         "conscrypt-unbundled",
     ],
 
+    // b/267608166: Target Java 11 rather than 17 so the host-side tool can
+    // run in environments where only JDK 11 is available.
+    java_version: "11",
+
     jni_libs: ["libconscrypt_openjdk_jni"],
 
     // The post-build signing tools need signapk.jar (and its shared libraries,