Merge "Let the finalization script create ABI dumps for all architectures"
diff --git a/core/Makefile b/core/Makefile
index 85f33bc..47c603c 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -5122,6 +5122,7 @@
   check_target_files_signatures \
   check_target_files_vintf \
   checkvintf \
+  create_brick_ota \
   delta_generator \
   e2fsck \
   e2fsdroid \
@@ -5539,10 +5540,12 @@
 name := $(name)-target_files-$(FILE_NAME_TAG)
 
 intermediates := $(call intermediates-dir-for,PACKAGING,target_files)
+BUILT_TARGET_FILES_DIR := $(intermediates)/$(name).zip.list
 BUILT_TARGET_FILES_PACKAGE := $(intermediates)/$(name).zip
-$(BUILT_TARGET_FILES_PACKAGE): intermediates := $(intermediates)
-$(BUILT_TARGET_FILES_PACKAGE): \
-	    zip_root := $(intermediates)/$(name)
+$(BUILT_TARGET_FILES_PACKAGE): zip_root := $(intermediates)/$(name)
+$(BUILT_TARGET_FILES_DIR): zip_root := $(intermediates)/$(name)
+$(BUILT_TARGET_FILES_DIR): intermediates := $(intermediates)
+
 
 # $(1): Directory to copy
 # $(2): Location to copy it to
@@ -5562,10 +5565,10 @@
     $(call intermediates-dir-for,EXECUTABLES,updater)/updater
 endif
 
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_OTA_TOOLS := $(built_ota_tools)
+$(BUILT_TARGET_FILES_DIR): PRIVATE_OTA_TOOLS := $(built_ota_tools)
 
 tool_extension := $(wildcard $(tool_extensions)/releasetools.py)
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_TOOL_EXTENSION := $(tool_extension)
+$(BUILT_TARGET_FILES_DIR): PRIVATE_TOOL_EXTENSION := $(tool_extension)
 
 updater_dep :=
 ifeq ($(AB_OTA_UPDATER),true)
@@ -5581,23 +5584,23 @@
 updater_dep += $(built_ota_tools)
 endif
 
-$(BUILT_TARGET_FILES_PACKAGE): $(updater_dep)
+$(BUILT_TARGET_FILES_DIR): $(updater_dep)
 
 # If we are using recovery as boot, output recovery files to BOOT/.
 # If we are moving recovery resources to vendor_boot, output recovery files to VENDOR_BOOT/.
 ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := BOOT
+$(BUILT_TARGET_FILES_DIR): PRIVATE_RECOVERY_OUT := BOOT
 else ifeq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true)
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := VENDOR_BOOT
+$(BUILT_TARGET_FILES_DIR): PRIVATE_RECOVERY_OUT := VENDOR_BOOT
 else
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := RECOVERY
+$(BUILT_TARGET_FILES_DIR): PRIVATE_RECOVERY_OUT := RECOVERY
 endif
 
 ifeq ($(AB_OTA_UPDATER),true)
   ifdef OSRELEASED_DIRECTORY
-    $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_id
-    $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version
-    $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version
+    $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_id
+    $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version
+    $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version
   endif
 
   # Not checking in board_config.mk, since AB_OTA_PARTITIONS may be updated in Android.mk (e.g. to
@@ -5705,28 +5708,28 @@
 # full system image deps, we speed up builds that do not build the system
 # image.
 ifdef BUILDING_SYSTEM_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(FULL_SYSTEMIMAGE_DEPS)
+  $(BUILT_TARGET_FILES_DIR): $(FULL_SYSTEMIMAGE_DEPS)
 else
   # releasetools may need the system build.prop even when building a
   # system-image-less product.
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BUILD_PROP_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_BUILD_PROP_TARGET)
 endif
 
 ifdef BUILDING_USERDATA_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_USERDATAIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_USERDATAIMAGE_FILES)
 endif
 
 ifdef BUILDING_SYSTEM_OTHER_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEMOTHERIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEMOTHERIMAGE_FILES)
 endif
 
 ifdef BUILDING_VENDOR_BOOT_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FILES)
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_RAMDISK_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
   # The vendor ramdisk may be built from the recovery ramdisk.
   ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
-    $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
+    $(BUILT_TARGET_FILES_DIR): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
   endif
 endif
 
@@ -5736,11 +5739,11 @@
   # commands in build-recoveryimage-target, which would touch the files under
   # TARGET_RECOVERY_OUT and race with packaging target-files.zip.
   ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-    $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTIMAGE_TARGET)
+    $(BUILT_TARGET_FILES_DIR): $(INSTALLED_BOOTIMAGE_TARGET)
   else
-    $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_RECOVERYIMAGE_TARGET)
+    $(BUILT_TARGET_FILES_DIR): $(INSTALLED_RECOVERYIMAGE_TARGET)
   endif
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERYIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_RECOVERYIMAGE_FILES)
 endif
 
 # Conditionally depend on the image files if the image is being built so the
@@ -5748,68 +5751,68 @@
 # if it is coming from a prebuilt.
 
 ifdef BUILDING_VENDOR_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDORIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDORIMAGE_FILES)
 else ifdef BOARD_PREBUILT_VENDORIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDORIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_VENDORIMAGE_TARGET)
 endif
 
 ifdef BUILDING_PRODUCT_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_PRODUCTIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_PRODUCTIMAGE_FILES)
 else ifdef BOARD_PREBUILT_PRODUCTIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_PRODUCTIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_PRODUCTIMAGE_TARGET)
 endif
 
 ifdef BUILDING_SYSTEM_EXT_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
 else ifdef BOARD_PREBUILT_SYSTEM_EXTIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
 endif
 
 ifneq (,$(BUILDING_BOOT_IMAGE)$(BUILDING_INIT_BOOT_IMAGE))
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RAMDISK_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_RAMDISK_FILES)
 endif  # BUILDING_BOOT_IMAGE != "" || BUILDING_INIT_BOOT_IMAGE != ""
 
 ifneq (,$(INTERNAL_PREBUILT_BOOTIMAGE) $(filter true,$(BOARD_COPY_BOOT_IMAGE_TO_TARGET_FILES)))
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_BOOTIMAGE_TARGET)
 endif
 
 ifdef BUILDING_ODM_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_ODMIMAGE_FILES)
 else ifdef BOARD_PREBUILT_ODMIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODMIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_ODMIMAGE_TARGET)
 endif
 
 ifdef BUILDING_VENDOR_DLKM_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
 else ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
 endif
 
 ifdef BUILDING_ODM_DLKM_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODM_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_ODM_DLKMIMAGE_FILES)
 else ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
 endif
 
 ifdef BUILDING_SYSTEM_DLKM_IMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
+  $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
 else ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
-  $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
+  $(BUILT_TARGET_FILES_DIR): $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
 endif
 
 ifeq ($(BUILD_QEMU_IMAGES),true)
   MK_VBMETA_BOOT_KERNEL_CMDLINE_SH := device/generic/goldfish/tools/mk_vbmeta_boot_params.sh
-  $(BUILT_TARGET_FILES_PACKAGE): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
+  $(BUILT_TARGET_FILES_DIR): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
 endif
 
 ifdef BOARD_PREBUILT_BOOTLOADER
-$(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTLOADER_MODULE)
+$(BUILT_TARGET_FILES_DIR): $(INSTALLED_BOOTLOADER_MODULE)
 droidcore-unbundled: $(INSTALLED_BOOTLOADER_MODULE)
 endif
 
 # Depending on the various images guarantees that the underlying
 # directories are up-to-date.
-$(BUILT_TARGET_FILES_PACKAGE): \
+$(BUILT_TARGET_FILES_DIR): \
 	    $(INSTALLED_RADIOIMAGE_TARGET) \
 	    $(INSTALLED_RECOVERYIMAGE_TARGET) \
 	    $(INSTALLED_CACHEIMAGE_TARGET) \
@@ -5846,7 +5849,7 @@
 	    $(BUILT_KERNEL_CONFIGS_FILE) \
 	    $(BUILT_KERNEL_VERSION_FILE) \
 	    | $(ACP)
-	@echo "Package target files: $@"
+	@echo "Building target files: $@"
 	$(hide) rm -rf $@ $@.list $(zip_root)
 	$(hide) mkdir -p $(dir $@) $(zip_root)
 ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))$(filter true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
@@ -6249,13 +6252,19 @@
 endif
 	@# Zip everything up, preserving symlinks and placing META/ files first to
 	@# help early validation of the .zip file while uploading it.
-	$(hide) find $(zip_root)/META | sort >$@.list
-	$(hide) find $(zip_root) -path $(zip_root)/META -prune -o -print | sort >>$@.list
+	$(hide) find $(zip_root)/META | sort >$@
+	$(hide) find $(zip_root) -path $(zip_root)/META -prune -o -print | sort >>$@
+
+$(BUILT_TARGET_FILES_PACKAGE): $(BUILT_TARGET_FILES_DIR)
+	@echo "Packaging target files: $@"
 	$(hide) $(SOONG_ZIP) -d -o $@ -C $(zip_root) -r $@.list
 
 .PHONY: target-files-package
 target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
 
+.PHONY: target-files-dir
+target-files-dir: $(BUILT_TARGET_FILES_DIR)
+
 $(call declare-1p-container,$(BUILT_TARGET_FILES_PACKAGE),)
 $(call declare-container-license-deps,$(BUILT_TARGET_FILES_PACKAGE), $(INSTALLED_RADIOIMAGE_TARGET) \
             $(INSTALLED_RECOVERYIMAGE_TARGET) \
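
The hunks above split target-files creation into two steps: a staging rule that populates $(zip_root) and writes the sorted file list to $(name).zip.list with META/ entries first, and a packaging rule that feeds that list to soong_zip. The sketch below is a rough, hypothetical Python rendering of that two-step flow, for illustration only; unlike `soong_zip -d`, it does not preserve symlinks.

```python
import os
import zipfile

def write_file_list(zip_root, list_path):
    """Staging step: list META/ paths first, then everything else, each group sorted."""
    meta_prefix = os.path.join(zip_root, "META") + os.sep
    meta, rest = [], []
    for dirpath, _, filenames in os.walk(zip_root):
        for name in filenames:
            path = os.path.join(dirpath, name)
            (meta if path.startswith(meta_prefix) else rest).append(path)
    with open(list_path, "w") as f:
        f.write("\n".join(sorted(meta) + sorted(rest)) + "\n")

def zip_from_list(zip_root, list_path, out_zip):
    """Packaging step: consume the staged list, keeping its ordering in the archive."""
    with open(list_path) as f, zipfile.ZipFile(out_zip, "w", zipfile.ZIP_DEFLATED) as z:
        for path in (line.strip() for line in f if line.strip()):
            z.write(path, arcname=os.path.relpath(path, zip_root))
```
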
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 718adb5..c52fa92 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -101,6 +101,9 @@
 endif
 
 $(call soong_config_set,art_module,source_build,$(ART_MODULE_BUILD_FROM_SOURCE))
+ifdef ART_DEBUG_OPT_FLAG
+$(call soong_config_set,art_module,art_debug_opt_flag,$(ART_DEBUG_OPT_FLAG))
+endif
 
 ifdef TARGET_BOARD_AUTO
   $(call add_soong_config_var_value, ANDROID, target_board_auto, $(TARGET_BOARD_AUTO))
diff --git a/core/config.mk b/core/config.mk
index 26e90ef..5b02569 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -783,13 +783,6 @@
   ifneq ($(call numbers_less_than,$(min_systemsdk_version),$(BOARD_SYSTEMSDK_VERSIONS)),)
     $(error BOARD_SYSTEMSDK_VERSIONS ($(BOARD_SYSTEMSDK_VERSIONS)) must all be greater than or equal to BOARD_API_LEVEL, BOARD_SHIPPING_API_LEVEL or PRODUCT_SHIPPING_API_LEVEL ($(min_systemsdk_version)))
   endif
-  ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),28),)
-    ifneq ($(TARGET_IS_64_BIT), true)
-      ifneq ($(TARGET_USES_64_BIT_BINDER), true)
-        $(error When PRODUCT_SHIPPING_API_LEVEL >= 28, TARGET_USES_64_BIT_BINDER must be true)
-      endif
-    endif
-  endif
   ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29),)
     ifneq ($(BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE),)
       $(error When PRODUCT_SHIPPING_API_LEVEL >= 29, BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE cannot be set)
diff --git a/core/definitions.mk b/core/definitions.mk
index ce1248e..e4cee7a 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -597,7 +597,7 @@
 define declare-copy-target-license-metadata
 $(strip $(if $(filter $(OUT_DIR)%,$(2)),\
   $(eval _tgt:=$(strip $(1)))\
-  $(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2)))\
+  $(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(sort $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2))))\
   $(eval ALL_COPIED_TARGETS += $(_tgt))))
 endef
 
@@ -2960,7 +2960,7 @@
   $(extract-package) \
   echo "Module name in Android tree: $(PRIVATE_MODULE)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
   echo "Local path in Android tree: $(PRIVATE_PATH)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
-  echo "Install path on $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT): $(PRIVATE_INSTALLED_MODULE)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+  echo "Install path: $(patsubst $(PRODUCT_OUT)/%,%,$(PRIVATE_INSTALLED_MODULE))" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
   echo >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
 endef
 ART_VERIDEX_APPCOMPAT_SCRIPT:=$(HOST_OUT)/bin/appcompat.sh
diff --git a/core/generate_enforce_rro.mk b/core/generate_enforce_rro.mk
index 9079981..e149ef4 100644
--- a/core/generate_enforce_rro.mk
+++ b/core/generate_enforce_rro.mk
@@ -1,6 +1,6 @@
 include $(CLEAR_VARS)
 
-enforce_rro_module := $(enforce_rro_source_module)__auto_generated_rro_$(enforce_rro_partition)
+enforce_rro_module := $(enforce_rro_source_module)__$(PRODUCT_NAME)__auto_generated_rro_$(enforce_rro_partition)
 LOCAL_PACKAGE_NAME := $(enforce_rro_module)
 
 intermediates := $(call intermediates-dir-for,APPS,$(LOCAL_PACKAGE_NAME),,COMMON)
diff --git a/core/main.mk b/core/main.mk
index e84dfaa..6f0b927 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -320,6 +320,10 @@
     ro.board.platform=$(TARGET_BOARD_PLATFORM) \
     ro.hwui.use_vulkan=$(TARGET_USES_VULKAN)
 
+# Set ro.gfx.angle.supported based on if ANGLE is installed in vendor partition
+ADDITIONAL_VENDOR_PROPERTIES += \
+    ro.gfx.angle.supported=$(USE_ANGLE)
+
 ifdef TARGET_SCREEN_DENSITY
 ADDITIONAL_VENDOR_PROPERTIES += \
     ro.sf.lcd_density=$(TARGET_SCREEN_DENSITY)
@@ -1386,29 +1390,6 @@
     $(CUSTOM_MODULES) \
   )
 
-ifdef FULL_BUILD
-#
-# Used by the cleanup logic in soong_ui to remove files that should no longer
-# be installed.
-#
-
-# Include all tests, so that we remove them from the test suites / testcase
-# folders when they are removed.
-test_files := $(foreach ts,$(ALL_COMPATIBILITY_SUITES),$(COMPATIBILITY.$(ts).FILES))
-
-$(shell mkdir -p $(PRODUCT_OUT) $(HOST_OUT))
-
-$(file >$(PRODUCT_OUT)/.installable_files$(if $(filter address,$(SANITIZE_TARGET)),_asan), \
-  $(sort $(patsubst $(PRODUCT_OUT)/%,%,$(filter $(PRODUCT_OUT)/%, \
-    $(modules_to_install) $(test_files)))))
-
-$(file >$(HOST_OUT)/.installable_test_files,$(sort \
-  $(patsubst $(HOST_OUT)/%,%,$(filter $(HOST_OUT)/%, \
-    $(test_files)))))
-
-test_files :=
-endif
-
 # Deduplicate compatibility suite dist files across modules and packages before
 # copying them to their requested locations. Assign the eval result to an unused
 # var to prevent Make from trying to make sense of it.
@@ -1467,6 +1448,28 @@
 modules_to_install := $(sort $(ALL_DEFAULT_INSTALLED_MODULES))
 ALL_DEFAULT_INSTALLED_MODULES :=
 
+ifdef FULL_BUILD
+#
+# Used by the cleanup logic in soong_ui to remove files that should no longer
+# be installed.
+#
+
+# Include all tests, so that we remove them from the test suites / testcase
+# folders when they are removed.
+test_files := $(foreach ts,$(ALL_COMPATIBILITY_SUITES),$(COMPATIBILITY.$(ts).FILES))
+
+$(shell mkdir -p $(PRODUCT_OUT) $(HOST_OUT))
+
+$(file >$(PRODUCT_OUT)/.installable_files$(if $(filter address,$(SANITIZE_TARGET)),_asan), \
+  $(sort $(patsubst $(PRODUCT_OUT)/%,%,$(filter $(PRODUCT_OUT)/%, \
+    $(modules_to_install) $(test_files)))))
+
+$(file >$(HOST_OUT)/.installable_test_files,$(sort \
+  $(patsubst $(HOST_OUT)/%,%,$(filter $(HOST_OUT)/%, \
+    $(test_files)))))
+
+test_files :=
+endif
 
 # Some notice deps refer to module names without prefix or arch suffix where
 # only the variants with them get built.
@@ -2197,12 +2200,19 @@
 
 $(call dist-for-goals,droid,$(PRODUCT_OUT)/sbom.spdx.json:sbom/sbom.spdx.json)
 else
-apps_only_sbom_files := $(sort $(patsubst %,%.spdx,$(apps_only_installed_files)))
+apps_only_sbom_files := $(sort $(patsubst %,%.spdx.json,$(filter %.apk,$(apps_only_installed_files))))
 $(apps_only_sbom_files): $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
 	rm -rf $@
 	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --product_out_dir=$(PRODUCT_OUT) --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr="$(PRODUCT_MANUFACTURER)" --unbundled
 
 sbom: $(apps_only_sbom_files)
+
+$(foreach f,$(apps_only_sbom_files),$(eval $(patsubst %.spdx.json,%-fragment.spdx,$f): $f))
+apps_only_fragment_files := $(patsubst %.spdx.json,%-fragment.spdx,$(apps_only_sbom_files))
+$(foreach f,$(apps_only_fragment_files),$(eval apps_only_fragment_dist_files += :sbom/$(notdir $f)))
+
+$(foreach f,$(apps_only_sbom_files),$(eval apps_only_sbom_dist_files += :sbom/$(notdir $f)))
+$(call dist-for-goals,apps_only,$(join $(apps_only_sbom_files),$(apps_only_sbom_dist_files)) $(join $(apps_only_fragment_files),$(apps_only_fragment_dist_files)))
 endif
 
 $(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
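
The foreach/eval/join dance above is compact; roughly, it pairs each per-app SBOM file (and its -fragment.spdx sibling) with a dist destination under sbom/. A hypothetical Python rendering of the resulting "src:dest" pairs, with paths invented purely for illustration:

```python
import os

# Hypothetical example input; real values come from apps_only_installed_files.
apps_only_sbom_files = [
    "out/target/product/generic/system/app/Foo/Foo.apk.spdx.json",
]

sbom_dist_pairs = [f"{f}:sbom/{os.path.basename(f)}" for f in apps_only_sbom_files]
fragment_files = [f.replace(".spdx.json", "-fragment.spdx") for f in apps_only_sbom_files]
fragment_dist_pairs = [f"{f}:sbom/{os.path.basename(f)}" for f in fragment_files]

# sbom_dist_pairs[0] ==
#   "out/target/product/generic/system/app/Foo/Foo.apk.spdx.json:sbom/Foo.apk.spdx.json"
```
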
diff --git a/core/package_internal.mk b/core/package_internal.mk
index c7a173b..2d0a569 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -201,10 +201,10 @@
 all_resources := $(strip $(my_res_resources) $(my_overlay_resources))
 
 # The linked resource package.
-my_res_package := $(intermediates)/package-res.apk
+my_res_package := $(intermediates.COMMON)/package-res.apk
 LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
 
-my_bundle_module := $(intermediates)/base.zip
+my_bundle_module := $(intermediates.COMMON)/base.zip
 LOCAL_INTERMEDIATE_TARGETS += $(my_bundle_module)
 
 # Always run aapt2, because we need to at least compile the AndroidManifest.xml.
@@ -570,7 +570,7 @@
 	$(compress-package)
 endif  # LOCAL_COMPRESSED_MODULE
 
-my_package_res_pb := $(intermediates)/package-res.pb.apk
+my_package_res_pb := $(intermediates.COMMON)/package-res.pb.apk
 $(my_package_res_pb): $(my_res_package) $(AAPT2)
 	$(AAPT2) convert --output-format proto $< -o $@
 
diff --git a/core/product.mk b/core/product.mk
index cdc3d09..1789561 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -41,7 +41,6 @@
 _product_list_vars += PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE
 _product_list_vars += PRODUCT_PACKAGES_ENG
 _product_list_vars += PRODUCT_PACKAGES_TESTS
-_product_list_vars += PRODUCT_AFDO_PROFILES
 
 # The device that this product maps to.
 _product_single_value_vars += PRODUCT_DEVICE
@@ -396,6 +395,8 @@
 #   supports it
 _product_single_value_vars += PRODUCT_ENABLE_UFFD_GC
 
+_product_list_vars += PRODUCT_AFDO_PROFILES
+
 .KATI_READONLY := _product_single_value_vars _product_list_vars
 _product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
 
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 6348cf0..346edcf 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -12,6 +12,16 @@
 include $(BUILD_SYSTEM)/art_config.mk
 include $(BUILD_SYSTEM)/dex_preopt_config.mk
 
+ifndef AFDO_PROFILES
+# Set AFDO_PROFILES
+-include vendor/google_data/pgo_profile/sampling/afdo_profiles.mk
+else
+$(error AFDO_PROFILES can only be set from soong_config.mk. For product-specific fdo_profiles, please use PRODUCT_AFDO_PROFILES)
+endif
+
+# PRODUCT_AFDO_PROFILES takes precedence over product-agnostic profiles in AFDO_PROFILES
+ALL_AFDO_PROFILES := $(PRODUCT_AFDO_PROFILES) $(AFDO_PROFILES)
+
 ifeq ($(WRITE_SOONG_VARIABLES),true)
 
 # Create soong.variables with copies of makefile settings.  Runs every build,
@@ -309,7 +319,7 @@
 $(call add_json_list, IncludeTags,                $(PRODUCT_INCLUDE_TAGS))
 $(call add_json_list, SourceRootDirs,             $(PRODUCT_SOURCE_ROOT_DIRS))
 
-$(call add_json_list, AfdoProfiles,                $(PRODUCT_AFDO_PROFILES))
+$(call add_json_list, AfdoProfiles,                $(ALL_AFDO_PROFILES))
 
 $(call json_end)
 
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 9400890..c770b34 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -30,8 +30,6 @@
 out_dir := $(HOST_OUT)/$(test_suite_name)/$(test_suite_subdir)
 test_artifacts := $(COMPATIBILITY.$(test_suite_name).FILES)
 test_tools := $(HOST_OUT_JAVA_LIBRARIES)/tradefed.jar \
-  $(HOST_OUT_JAVA_LIBRARIES)/tradefed-no-fwk.jar \
-  $(HOST_OUT_JAVA_LIBRARIES)/tradefed-test-framework.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/loganalysis.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util.jar \
   $(HOST_OUT_JAVA_LIBRARIES)/compatibility-tradefed.jar \
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index b160648..9e9e74b 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -40,7 +40,7 @@
   include $(INTERNAL_BUILD_ID_MAKEFILE)
 endif
 
-DEFAULT_PLATFORM_VERSION := UP1A
+DEFAULT_PLATFORM_VERSION := VP1A
 .KATI_READONLY := DEFAULT_PLATFORM_VERSION
 MIN_PLATFORM_VERSION := UP1A
 MAX_PLATFORM_VERSION := VP1A
@@ -91,7 +91,7 @@
 Base Base11 Cupcake Donut Eclair Eclair01 EclairMr1 Froyo Gingerbread GingerbreadMr1 \
 Honeycomb HoneycombMr1 HoneycombMr2 IceCreamSandwich IceCreamSandwichMr1 \
 JellyBean JellyBeanMr1 JellyBeanMr2 Kitkat KitkatWatch Lollipop LollipopMr1 M N NMr1 O OMr1 P \
-Q R S Sv2 Tiramisu UpsideDownCake
+Q R S Sv2 Tiramisu UpsideDownCake VanillaIceCream
 
 # Convert from space separated list to comma separated
 PLATFORM_VERSION_KNOWN_CODENAMES := \
diff --git a/envsetup.sh b/envsetup.sh
index 905635c..17d8a5d 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -1096,12 +1096,12 @@
 #
 # Easy way to make system.img/etc writable
 function syswrite() {
-  adb wait-for-device && adb root || return 1
+  adb wait-for-device && adb root && adb wait-for-device || return 1
   if [[ $(adb disable-verity | grep -i "reboot") ]]; then
       echo "rebooting"
-      adb reboot && adb wait-for-device && adb root || return 1
+      adb reboot && adb wait-for-device && adb root && adb wait-for-device || return 1
   fi
-  adb wait-for-device && adb remount || return 1
+  adb remount || return 1
 }
 
 # coredump_setup - enable core dumps globally for any process
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 4d95b33..67e31df 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -36,6 +36,7 @@
 TARGET_COPY_OUT_PRODUCT := system/product
 TARGET_COPY_OUT_SYSTEM_EXT := system/system_ext
 BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE :=
+BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE :=
 
 # Creates metadata partition mount point under root for
 # the devices with metadata partition
diff --git a/target/board/BoardConfigMainlineCommon.mk b/target/board/BoardConfigMainlineCommon.mk
index 00f6e5b..e5ac5cf 100644
--- a/target/board/BoardConfigMainlineCommon.mk
+++ b/target/board/BoardConfigMainlineCommon.mk
@@ -14,6 +14,8 @@
 TARGET_COPY_OUT_SYSTEM_EXT := system_ext
 TARGET_COPY_OUT_VENDOR := vendor
 TARGET_COPY_OUT_PRODUCT := product
+BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE := ext4
 
 # Creates metadata partition mount point under root for
 # the devices with metadata partition
diff --git a/target/board/emulator_arm64/device.mk b/target/board/emulator_arm64/device.mk
index dc84192..d221e64 100644
--- a/target/board/emulator_arm64/device.mk
+++ b/target/board/emulator_arm64/device.mk
@@ -17,12 +17,3 @@
 PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
 PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
 
-# Cuttlefish has GKI kernel prebuilts, so use those for the GKI boot.img.
-ifeq ($(TARGET_PREBUILT_KERNEL),)
-    LOCAL_KERNEL := kernel/prebuilts/5.4/arm64/kernel-5.4-lz4
-else
-    LOCAL_KERNEL := $(TARGET_PREBUILT_KERNEL)
-endif
-
-PRODUCT_COPY_FILES += \
-    $(LOCAL_KERNEL):kernel
diff --git a/target/product/aosp_64bitonly_x86_64.mk b/target/product/aosp_64bitonly_x86_64.mk
index b8ca3aa..75fd3c8 100644
--- a/target/product/aosp_64bitonly_x86_64.mk
+++ b/target/product/aosp_64bitonly_x86_64.mk
@@ -58,6 +58,9 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_64bitonly_x86_64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
diff --git a/target/product/aosp_arm.mk b/target/product/aosp_arm.mk
index 5f200aa..61c1316 100644
--- a/target/product/aosp_arm.mk
+++ b/target/product/aosp_arm.mk
@@ -57,6 +57,9 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_arm,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index ffc37a9..6c907db 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -62,6 +62,9 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_arm64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
diff --git a/target/product/aosp_riscv64.mk b/target/product/aosp_riscv64.mk
index 0e5d9fe..270a989 100644
--- a/target/product/aosp_riscv64.mk
+++ b/target/product/aosp_riscv64.mk
@@ -53,6 +53,9 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_riscv64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 669874e..a2f0390 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -55,6 +55,9 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_x86,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index deaa3b1..535ee3f 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -64,6 +64,9 @@
 # Special settings for GSI releasing
 #
 ifeq (aosp_x86_64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
 endif
 
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 09d4bc9..3b97792 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -36,7 +36,7 @@
 # GSI should always support up-to-date platform features.
 # Keep this value at the latest API level to ensure latest build system
 # default configs are applied.
-PRODUCT_SHIPPING_API_LEVEL := 31
+PRODUCT_SHIPPING_API_LEVEL := 34
 
 # Enable dynamic partitions to facilitate mixing onto Cuttlefish
 PRODUCT_USE_DYNAMIC_PARTITIONS := true
@@ -88,9 +88,6 @@
 PRODUCT_BUILD_SYSTEM_DLKM_IMAGE := false
 PRODUCT_EXPORT_BOOT_IMAGE_TO_DIST := true
 
-# Always build modules from source
-MODULE_BUILD_FROM_SOURCE := true
-
 # Additional settings used in all GSI builds
 PRODUCT_PRODUCT_PROPERTIES += \
     ro.crypto.metadata_init_delete_all_keys.enabled=false \
diff --git a/tools/compliance/cmd/sbom/sbom.go b/tools/compliance/cmd/sbom/sbom.go
index c378e39..f61289e 100644
--- a/tools/compliance/cmd/sbom/sbom.go
+++ b/tools/compliance/cmd/sbom/sbom.go
@@ -55,6 +55,7 @@
 	product      string
 	stripPrefix  []string
 	creationTime creationTimeGetter
+	buildid      string
 }
 
 func (ctx context) strip(installPath string) string {
@@ -124,6 +125,7 @@
 	depsFile := flags.String("d", "", "Where to write the deps file")
 	product := flags.String("product", "", "The name of the product for which the notice is generated.")
 	stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+	buildid := flags.String("build_id", "", "Uniquely identifies the build. (default timestamp)")
 
 	flags.Parse(expandedArgs)
 
@@ -162,7 +164,7 @@
 		ofile = obuf
 	}
 
-	ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, actualTime}
+	ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, actualTime, *buildid}
 
 	spdxDoc, deps, err := sbomGenerator(ctx, flags.Args()...)
 
@@ -317,14 +319,21 @@
 }
 
 // generateSPDXNamespace generates a unique SPDX Document Namespace using a SHA1 checksum
-// and the CreationInfo.Created field as the date.
-func generateSPDXNamespace(created string) string {
-	// Compute a SHA1 checksum of the CreationInfo.Created field.
-	hash := sha1.Sum([]byte(created))
-	checksum := hex.EncodeToString(hash[:])
+func generateSPDXNamespace(buildid string, created string, files ...string) string {
 
-	// Combine the checksum and timestamp to generate the SPDX Namespace.
-	namespace := fmt.Sprintf("SPDXRef-DOCUMENT-%s-%s", created, checksum)
+	seed := strings.Join(files, "")
+
+	if buildid == "" {
+		seed += created
+	} else {
+		seed += buildid
+	}
+
+	// Compute a SHA1 checksum of the seed.
+	hash := sha1.Sum([]byte(seed))
+	uuid := hex.EncodeToString(hash[:])
+
+	namespace := fmt.Sprintf("SPDXRef-DOCUMENT-%s", uuid)
 
 	return namespace
 }
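
To make the new namespace derivation concrete: the seed is the concatenation of the input file names plus either the build id (when provided) or the creation timestamp, and the SHA1 of that seed becomes the document namespace. A minimal Python mirror of the Go function above, for illustration only:

```python
import hashlib

def generate_spdx_namespace(buildid: str, created: str, *files: str) -> str:
    seed = "".join(files)
    seed += buildid if buildid else created
    return "SPDXRef-DOCUMENT-" + hashlib.sha1(seed.encode()).hexdigest()

# With a build id, the timestamp no longer influences the namespace, so two
# builds of the same files under the same build id get a stable identifier.
assert generate_spdx_namespace("example-1", "2022-05-01", "file1") == \
       generate_spdx_namespace("example-1", "2022-05-02", "file1")
```
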
@@ -523,7 +532,7 @@
 		DataLicense:       "CC0-1.0",
 		SPDXIdentifier:    "DOCUMENT",
 		DocumentName:      docName,
-		DocumentNamespace: generateSPDXNamespace(ci.Created),
+		DocumentNamespace: generateSPDXNamespace(ctx.buildid, ci.Created, files...),
 		CreationInfo:      ci,
 		Packages:          pkgs,
 		Relationships:     relationships,
diff --git a/tools/compliance/cmd/sbom/sbom_test.go b/tools/compliance/cmd/sbom/sbom_test.go
index 6472f51..8a62713 100644
--- a/tools/compliance/cmd/sbom/sbom_test.go
+++ b/tools/compliance/cmd/sbom/sbom_test.go
@@ -59,7 +59,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-firstparty-highest.apex",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/highest.apex.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -187,7 +187,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-firstparty-application",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/application.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -266,7 +266,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-firstparty-container.zip",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/container.zip.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -394,7 +394,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-firstparty-bin-bin1",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/bin/bin1.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -460,7 +460,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-firstparty-lib-libd.so",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/lib/libd.so.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -500,7 +500,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-notice-highest.apex",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/highest.apex.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -634,7 +634,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-notice-container.zip",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/container.zip.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -768,7 +768,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-notice-application",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/application.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -853,7 +853,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-notice-bin-bin1",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/bin/bin1.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -925,7 +925,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-notice-lib-libd.so",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/lib/libd.so.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -965,7 +965,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-reciprocal-highest.apex",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/highest.apex.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -1105,7 +1105,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-reciprocal-application",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/application.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -1196,7 +1196,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-reciprocal-bin-bin1",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/bin/bin1.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -1268,7 +1268,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-reciprocal-lib-libd.so",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/lib/libd.so.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -1308,7 +1308,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-restricted-highest.apex",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/highest.apex.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -1454,7 +1454,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-restricted-container.zip",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/container.zip.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -1600,7 +1600,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-restricted-bin-bin1",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/bin/bin1.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -1678,7 +1678,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-restricted-lib-libd.so",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/lib/libd.so.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -1718,7 +1718,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-proprietary-highest.apex",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/highest.apex.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -1864,7 +1864,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-proprietary-container.zip",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/container.zip.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -2010,7 +2010,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-proprietary-application",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/application.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -2101,7 +2101,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-proprietary-bin-bin1",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/bin/bin1.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -2173,7 +2173,7 @@
 				DataLicense:       "CC0-1.0",
 				SPDXIdentifier:    "DOCUMENT",
 				DocumentName:      "testdata-proprietary-lib-libd.so",
-				DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+				DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/lib/libd.so.meta_lic"),
 				CreationInfo:      getCreationInfo(t),
 				Packages: []*spdx.Package{
 					{
@@ -2215,7 +2215,7 @@
 				rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
 			}
 
-			ctx := context{stdout, stderr, compliance.GetFS(tt.outDir), "", []string{tt.stripPrefix}, fakeTime}
+			ctx := context{stdout, stderr, compliance.GetFS(tt.outDir), "", []string{tt.stripPrefix}, fakeTime, ""}
 
 			spdxDoc, deps, err := sbomGenerator(&ctx, rootFiles...)
 			if err != nil {
@@ -2262,6 +2262,96 @@
 	}
 }
 
+func TestGenerateSPDXNamespace(t *testing.T) {
+
+	buildID1 := "example-1"
+	buildID2 := "example-2"
+	files1 := "file1"
+	timestamp1 := "2022-05-01"
+	timestamp2 := "2022-05-02"
+	files2 := "file2"
+
+	// Test case 1: different timestamps, same files
+	nsh1 := generateSPDXNamespace("", timestamp1, files1)
+	nsh2 := generateSPDXNamespace("", timestamp2, files1)
+
+	if nsh1 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp1, files1)
+	}
+
+	if nsh2 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp2, files1)
+	}
+
+	if nsh1 == nsh2 {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", "", timestamp1, files1, "", timestamp2, files1)
+	}
+
+	// Test case 2: different build ids, same timestamps and files
+	nsh1 = generateSPDXNamespace(buildID1, timestamp1, files1)
+	nsh2 = generateSPDXNamespace(buildID2, timestamp1, files1)
+
+	if nsh1 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files1)
+	}
+
+	if nsh2 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID2, timestamp1, files1)
+	}
+
+	if nsh1 == nsh2 {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", buildID1, timestamp1, files1, buildID2, timestamp1, files1)
+	}
+
+	// Test case 3: same build ids and files, different timestamps
+	nsh1 = generateSPDXNamespace(buildID1, timestamp1, files1)
+	nsh2 = generateSPDXNamespace(buildID1, timestamp2, files1)
+
+	if nsh1 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files1)
+	}
+
+	if nsh2 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp2, files1)
+	}
+
+	if nsh1 != nsh2 {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected same namespace hashes, but got different: %s and %s", buildID1, timestamp1, files1, buildID1, timestamp2, files1, nsh1, nsh2)
+	}
+
+	// Test case 4: same build ids and timestamps, different files
+	nsh1 = generateSPDXNamespace(buildID1, timestamp1, files1)
+	nsh2 = generateSPDXNamespace(buildID1, timestamp1, files2)
+
+	if nsh1 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files1)
+	}
+
+	if nsh2 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files2)
+	}
+
+	if nsh1 == nsh2 {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", buildID1, timestamp1, files1, buildID1, timestamp1, files2)
+	}
+
+	// Test case 5: empty build ids, same timestamps and different files
+	nsh1 = generateSPDXNamespace("", timestamp1, files1)
+	nsh2 = generateSPDXNamespace("", timestamp1, files2)
+
+	if nsh1 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp1, files1)
+	}
+
+	if nsh2 == "" {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp1, files2)
+	}
+
+	if nsh1 == nsh2 {
+		t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", "", timestamp1, files1, "", timestamp1, files2)
+	}
+}
+
 func getCreationInfo(t *testing.T) *spdx.CreationInfo {
 	ci, err := builder2v2.BuildCreationInfoSection2_2("Organization", "Google LLC", nil)
 	if err != nil {
diff --git a/tools/finalization/environment.sh b/tools/finalization/environment.sh
index 8c838aa..892361d 100755
--- a/tools/finalization/environment.sh
+++ b/tools/finalization/environment.sh
@@ -2,17 +2,18 @@
 
 set -ex
 
-export FINAL_BUG_ID='0'
+export FINAL_BUG_ID='0' # CI only
 
-export FINAL_PLATFORM_CODENAME='UpsideDownCake'
-export CURRENT_PLATFORM_CODENAME='UpsideDownCake'
-export FINAL_PLATFORM_CODENAME_JAVA='UPSIDE_DOWN_CAKE'
-export FINAL_PLATFORM_SDK_VERSION='34'
-export FINAL_PLATFORM_VERSION='14'
+export FINAL_PLATFORM_CODENAME='VanillaIceCream'
+export CURRENT_PLATFORM_CODENAME='VanillaIceCream'
+export FINAL_PLATFORM_CODENAME_JAVA='VANILLA_ICE_CREAM'
+export FINAL_BUILD_PREFIX='VP1A'
+export FINAL_PLATFORM_VERSION='15'
 
-export FINAL_BUILD_PREFIX='UP1A'
-
-export FINAL_MAINLINE_EXTENSION='7'
+# Set arbitrary large values for CI.
+# Feel free to randomize them once in a while to detect buggy version detection code.
+export FINAL_PLATFORM_SDK_VERSION='97'
+export FINAL_MAINLINE_EXTENSION='98'
 
 # Options:
 # 'unfinalized' - branch is in development state,
diff --git a/tools/finalization/finalize-sdk-rel.sh b/tools/finalization/finalize-sdk-rel.sh
index 6cf4124..714b8a8 100755
--- a/tools/finalization/finalize-sdk-rel.sh
+++ b/tools/finalization/finalize-sdk-rel.sh
@@ -56,7 +56,7 @@
     mkdir -p "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION"
     cp -r "$top/prebuilts/abi-dumps/platform/current/64/" "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION/"
 
-    if [ "$FINAL_STATE" != "sdk" ] ; then
+    if [ "$FINAL_STATE" != "sdk" || "$FINAL_PLATFORM_CODENAME" == "$CURRENT_PLATFORM_CODENAME" ] ; then
         # prebuilts/abi-dumps/vndk
         mv "$top/prebuilts/abi-dumps/vndk/$CURRENT_PLATFORM_CODENAME" "$top/prebuilts/abi-dumps/vndk/$FINAL_PLATFORM_SDK_VERSION"
     fi;
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 4e783ff..699c8b2 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -717,26 +717,46 @@
       script.AssertOemProperty(prop, values, oem_no_mount)
 
 
-def ReadFromInputFile(input_file, fn):
-  """Reads the contents of fn from input zipfile or directory."""
+def DoesInputFileContain(input_file, fn):
+  """Check whether the input target_files.zip contain an entry `fn`"""
   if isinstance(input_file, zipfile.ZipFile):
-    return input_file.read(fn).decode()
+    return fn in input_file.namelist()
   elif zipfile.is_zipfile(input_file):
     with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
-      return zfp.read(fn).decode()
+      return fn in zfp.namelist()
+  else:
+    if not os.path.isdir(input_file):
+      raise ValueError(
+          "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
+    path = os.path.join(input_file, *fn.split("/"))
+    return os.path.exists(path)
+
+
+def ReadBytesFromInputFile(input_file, fn):
+  """Reads the bytes of fn from input zipfile or directory."""
+  if isinstance(input_file, zipfile.ZipFile):
+    return input_file.read(fn)
+  elif zipfile.is_zipfile(input_file):
+    with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
+      return zfp.read(fn)
   else:
     if not os.path.isdir(input_file):
       raise ValueError(
           "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
     path = os.path.join(input_file, *fn.split("/"))
     try:
-      with open(path) as f:
+      with open(path, "rb") as f:
         return f.read()
     except IOError as e:
       if e.errno == errno.ENOENT:
         raise KeyError(fn)
 
 
+def ReadFromInputFile(input_file, fn):
+  """Reads the str contents of fn from input zipfile or directory."""
+  return ReadBytesFromInputFile(input_file, fn).decode()
+
+
 def ExtractFromInputFile(input_file, fn):
   """Extracts the contents of fn from input zipfile or directory into a file."""
   if isinstance(input_file, zipfile.ZipFile):
@@ -1540,7 +1560,8 @@
 
   custom_partitions = OPTIONS.info_dict.get(
       "avb_custom_images_partition_list", "").strip().split()
-  custom_avb_partitions = ["vbmeta_" + part for part in OPTIONS.info_dict.get("avb_custom_vbmeta_images_partition_list", "").strip().split()]
+  custom_avb_partitions = ["vbmeta_" + part for part in OPTIONS.info_dict.get(
+      "avb_custom_vbmeta_images_partition_list", "").strip().split()]
 
   for partition, path in partitions.items():
     if partition not in needed_partitions:
@@ -1906,7 +1927,7 @@
   data = _BuildBootableImage(prebuilt_name, os.path.join(unpack_dir, tree_subdir),
                              os.path.join(unpack_dir, fs_config),
                              os.path.join(unpack_dir, 'META/ramdisk_node_list')
-                                if dev_nodes else None,
+                             if dev_nodes else None,
                              info_dict, has_ramdisk, two_step_image)
   if data:
     return File(name, data)
@@ -2966,7 +2987,6 @@
       cmd.append(entry)
     RunAndCheckOutput(cmd)
 
-
   os.replace(new_zipfile, zip_filename)
 
 
@@ -4071,6 +4091,7 @@
     # https://source.android.com/devices/bootloader/images
     return fp.read(4) == b'\x3A\xFF\x26\xED'
 
+
 def ParseUpdateEngineConfig(path: str):
   """Parse the update_engine config stored in file `path`
   Args
@@ -4092,4 +4113,4 @@
     if not minor:
       raise ValueError(
           f"{path} is an invalid update_engine config, missing PAYLOAD_MINOR_VERSION {data}")
-    return (int(major.group(1)), int(minor.group(1)))
\ No newline at end of file
+    return (int(major.group(1)), int(minor.group(1)))
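
The helpers added above let callers probe and read a target_files input the same way whether it is an open ZipFile, a path to a .zip on disk, or an extracted directory. A hedged usage sketch — the wrapper function below is hypothetical, not part of the patch:

```python
import common

def read_ab_partitions(target_files):
    """Returns the A/B partition list, or [] if the entry is absent."""
    if not common.DoesInputFileContain(target_files, "META/ab_partitions.txt"):
        return []
    # ReadFromInputFile is now a thin .decode() wrapper over ReadBytesFromInputFile.
    return common.ReadFromInputFile(
        target_files, "META/ab_partitions.txt").strip().split("\n")
```
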
diff --git a/tools/releasetools/merge/merge_utils.py b/tools/releasetools/merge/merge_utils.py
index c284338..b5683a8 100644
--- a/tools/releasetools/merge/merge_utils.py
+++ b/tools/releasetools/merge/merge_utils.py
@@ -181,6 +181,7 @@
 
 _PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/.*$')
 _IMAGE_PARTITION_PATTERN = re.compile(r'^IMAGES/(.*)\.img$')
+_PREBUILT_IMAGE_PARTITION_PATTERN = re.compile(r'^PREBUILT_IMAGES/(.*)\.img$')
 
 
 def ItemListToPartitionSet(item_list):
@@ -203,12 +204,12 @@
   partition_set = set()
 
   for item in item_list:
-    for pattern in (_PARTITION_ITEM_PATTERN, _IMAGE_PARTITION_PATTERN):
+    for pattern in (_PARTITION_ITEM_PATTERN, _IMAGE_PARTITION_PATTERN, _PREBUILT_IMAGE_PARTITION_PATTERN):
       partition_match = pattern.search(item.strip())
       if partition_match:
         partition = partition_match.group(1).lower()
         # These directories in target-files are not actual partitions.
-        if partition not in ('meta', 'images'):
+        if partition not in ('meta', 'images', 'prebuilt_images'):
           partition_set.add(partition)
 
   return partition_set
@@ -217,7 +218,7 @@
 # Partitions that are grabbed from the framework partial build by default.
 _FRAMEWORK_PARTITIONS = {
     'system', 'product', 'system_ext', 'system_other', 'root', 'system_dlkm',
-    'vbmeta_system'
+    'vbmeta_system', 'pvmfw'
 }
 
 
@@ -253,7 +254,7 @@
     if partition == 'meta':
       continue
 
-    if partition == 'images':
+    if partition in ('images', 'prebuilt_images'):
       image_partition, extension = os.path.splitext(os.path.basename(namelist))
       if image_partition == 'vbmeta':
         # Always regenerate vbmeta.img since it depends on hash information
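
Pulling the matching logic out of the diff above: entries under PREBUILT_IMAGES/ now contribute partitions just like IMAGES/, while the directory names themselves remain excluded. A standalone sketch of the same matching, assuming only the three patterns shown:

```python
import re

_PATTERNS = (
    re.compile(r'^([A-Z_]+)/.*$'),
    re.compile(r'^IMAGES/(.*)\.img$'),
    re.compile(r'^PREBUILT_IMAGES/(.*)\.img$'),
)

def item_list_to_partition_set(item_list):
    partitions = set()
    for item in item_list:
        for pattern in _PATTERNS:
            match = pattern.search(item.strip())
            if match:
                partition = match.group(1).lower()
                if partition not in ('meta', 'images', 'prebuilt_images'):
                    partitions.add(partition)
    return partitions

# item_list_to_partition_set(["PREBUILT_IMAGES/pvmfw.img", "SYSTEM/build.prop"])
#   -> {"pvmfw", "system"}
```
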
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index df283d6..e40256c 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -248,6 +248,9 @@
 
   --security_patch_level
       Override the security patch level in target files
+
+  --max_threads
+      Specify max number of threads allowed when generating A/B OTA
 """
 
 from __future__ import print_function
@@ -267,8 +270,8 @@
 import common
 import ota_utils
 from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
-                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME)
-from common import IsSparseImage
+                       PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, CopyTargetFilesDir)
+from common import DoesInputFileContain, IsSparseImage
 import target_files_diff
 from check_target_files_vintf import CheckVintfIfTrebleEnabled
 from non_ab_ota import GenerateNonAbOtaPackage
@@ -321,6 +324,8 @@
 OPTIONS.enable_lz4diff = False
 OPTIONS.vabc_compression_param = None
 OPTIONS.security_patch_level = None
+OPTIONS.max_threads = None
+
 
 POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
 DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -830,6 +835,12 @@
 
 def GenerateAbOtaPackage(target_file, output_file, source_file=None):
   """Generates an Android OTA package that has A/B update payload."""
+  # If input target_files are directories, create a copy so that we can modify
+  # them directly
+  if os.path.isdir(target_file):
+    target_file = CopyTargetFilesDir(target_file)
+  if source_file is not None and os.path.isdir(source_file):
+    source_file = CopyTargetFilesDir(source_file)
   # Stage the output zip package for package signing.
   if not OPTIONS.no_signing:
     staging_file = common.MakeTempFile(suffix='.zip')
@@ -840,6 +851,7 @@
                                allowZip64=True)
 
   if source_file is not None:
+    source_file = ota_utils.ExtractTargetFiles(source_file)
     assert "ab_partitions" in OPTIONS.source_info_dict, \
         "META/ab_partitions.txt is required for ab_update."
     assert "ab_partitions" in OPTIONS.target_info_dict, \
@@ -942,9 +954,8 @@
   elif OPTIONS.skip_postinstall:
     target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
   # Target_file may have been modified, reparse ab_partitions
-  with zipfile.ZipFile(target_file, allowZip64=True) as zfp:
-    target_info.info_dict['ab_partitions'] = zfp.read(
-        AB_PARTITIONS).decode().strip().split("\n")
+  target_info.info_dict['ab_partitions'] = common.ReadFromInputFile(target_file,
+                                                                    AB_PARTITIONS).strip().split("\n")
 
   CheckVintfIfTrebleEnabled(target_file, target_info)
 
@@ -979,6 +990,9 @@
 
   additional_args += ["--security_patch_level", security_patch_level]
 
+  if OPTIONS.max_threads:
+    additional_args += ["--max_threads", OPTIONS.max_threads]
+
   additional_args += ["--enable_zucchini=" +
                       str(OPTIONS.enable_zucchini).lower()]
 
@@ -1042,15 +1056,13 @@
 
   # If dm-verity is supported for the device, copy contents of care_map
   # into A/B OTA package.
-  target_zip = zipfile.ZipFile(target_file, "r", allowZip64=True)
   if target_info.get("avb_enable") == "true":
-    care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
-                     "META/" + x in target_zip.namelist()]
-
     # Adds care_map if either the protobuf format or the plain text one exists.
-    if care_map_list:
-      care_map_name = care_map_list[0]
-      care_map_data = target_zip.read("META/" + care_map_name)
+    for care_map_name in ["care_map.pb", "care_map.txt"]:
+      if not DoesInputFileContain(target_file, "META/" + care_map_name):
+        continue
+      care_map_data = common.ReadBytesFromInputFile(
+          target_file, "META/" + care_map_name)
       # In order to support streaming, care_map needs to be packed as
       # ZIP_STORED.
       common.ZipWriteStr(output_zip, care_map_name, care_map_data,
@@ -1060,13 +1072,11 @@
 
   # Add the source apex version for incremental ota updates, and write the
   # result apex info to the ota package.
-  ota_apex_info = ota_utils.ConstructOtaApexInfo(target_zip, source_file)
+  ota_apex_info = ota_utils.ConstructOtaApexInfo(target_file, source_file)
   if ota_apex_info is not None:
     common.ZipWriteStr(output_zip, "apex_info.pb", ota_apex_info,
                        compress_type=zipfile.ZIP_STORED)
 
-  common.ZipClose(target_zip)
-
   # We haven't written the metadata entry yet, which will be handled in
   # FinalizeMetadata().
   common.ZipClose(output_zip)
@@ -1189,6 +1199,12 @@
       OPTIONS.vabc_compression_param = a.lower()
     elif o == "--security_patch_level":
       OPTIONS.security_patch_level = a
+    elif o in ("--max_threads"):
+      if a.isdigit():
+        OPTIONS.max_threads = a
+      else:
+        raise ValueError("Cannot parse value %r for option %r - only "
+                         "integers are allowed." % (a, o))
     else:
       return False
     return True
@@ -1240,6 +1256,7 @@
                                  "enable_lz4diff=",
                                  "vabc_compression_param=",
                                  "security_patch_level=",
+                                 "max_threads=",
                              ], extra_option_handler=option_handler)
   common.InitLogging()
 
@@ -1257,7 +1274,7 @@
   if OPTIONS.extracted_input is not None:
     OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
   else:
-    OPTIONS.info_dict = ParseInfoDict(args[0])
+    OPTIONS.info_dict = common.LoadInfoDict(args[0])
 
   if OPTIONS.wipe_user_data:
     if not OPTIONS.vabc_downgrade:
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 985aeda..3291d56 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -22,7 +22,8 @@
 
 import ota_metadata_pb2
 import common
-from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
+import fnmatch
+from common import (ZipDelete, DoesInputFileContain, ReadBytesFromInputFile,
+                    OPTIONS, MakeTempFile,
                     ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
                     SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
                     GetRamdiskFormat, ParseUpdateEngineConfig)
@@ -44,7 +45,8 @@
 
 METADATA_NAME = 'META-INF/com/android/metadata'
 METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'
-UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
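+# The extra patterns below pull build props and VINTF metadata out of the
+# archive so they can be read from an extracted target_files directory.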
+UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*', '*/build.prop',
+                 '*/default.prop', '*/build.default', '*/etc/vintf/*']
 SECURITY_PATCH_LEVEL_PROP_NAME = "ro.build.version.security_patch"
 
 
@@ -626,12 +628,10 @@
   """If applicable, add the source version to the apex info."""
 
   def _ReadApexInfo(input_zip):
-    if "META/apex_info.pb" not in input_zip.namelist():
+    if not DoesInputFileContain(input_zip, "META/apex_info.pb"):
       logger.warning("target_file doesn't contain apex_info.pb %s", input_zip)
       return None
-
-    with input_zip.open("META/apex_info.pb", "r") as zfp:
-      return zfp.read()
+    return ReadBytesFromInputFile(input_zip, "META/apex_info.pb")
 
   target_apex_string = _ReadApexInfo(target_zip)
   # Return early if the target apex info doesn't exist or is empty.
@@ -642,8 +642,7 @@
   if not source_file:
     return target_apex_string
 
-  with zipfile.ZipFile(source_file, "r", allowZip64=True) as source_zip:
-    source_apex_string = _ReadApexInfo(source_zip)
+  source_apex_string = _ReadApexInfo(source_file)
   if not source_apex_string:
     return target_apex_string
 
@@ -727,7 +726,7 @@
     logger.info("target files %s is already extracted", path)
     return path
   extracted_dir = common.MakeTempDir("target_files")
-  common.UnzipToDir(path, extracted_dir, UNZIP_PATTERN)
+  common.UnzipToDir(path, extracted_dir, UNZIP_PATTERN + [""])
   return extracted_dir
 
 
@@ -1040,3 +1039,27 @@
     assert metadata_total <= payload_size
 
     return (payload_offset, metadata_total)
+
+
+def Fnmatch(filename, patterns):
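+  """Returns True if filename matches any of the given glob patterns."""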
+  return any(fnmatch.fnmatch(filename, pat) for pat in patterns)
+
+
+def CopyTargetFilesDir(input_dir):
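+  """Copies the OTA-relevant subset of an extracted target_files directory
+  (IMAGES/, META/, build props and VINTF metadata) into a new temp dir."""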
+  output_dir = common.MakeTempDir("target_files")
+  shutil.copytree(os.path.join(input_dir, "IMAGES"),
+                  os.path.join(output_dir, "IMAGES"), dirs_exist_ok=True)
+  shutil.copytree(os.path.join(input_dir, "META"),
+                  os.path.join(output_dir, "META"), dirs_exist_ok=True)
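+  # Copy the remaining files that match UNZIP_PATTERN, limited to build props,
+  # prop.default and VINTF metadata, preserving their relative paths.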
+  for (dirpath, _, filenames) in os.walk(input_dir):
+    for filename in filenames:
+      path = os.path.join(dirpath, filename)
+      relative_path = path.removeprefix(input_dir).removeprefix("/")
+      if not Fnmatch(relative_path, UNZIP_PATTERN):
+        continue
+      if (filename.endswith(".prop") or filename == "prop.default"
+          or "/etc/vintf/" in relative_path):
+        target_path = os.path.join(
+            output_dir, relative_path)
+        os.makedirs(os.path.dirname(target_path), exist_ok=True)
+        shutil.copy(path, target_path)
+  return output_dir
diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp
index f6c0190..4837dde 100644
--- a/tools/sbom/Android.bp
+++ b/tools/sbom/Android.bp
@@ -12,6 +12,10 @@
 // See the License for the specific language governing permissions and
 // limitations under the License.
 
+package {
+    default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
 python_binary_host {
     name: "generate-sbom",
     srcs: [
diff --git a/tools/sbom/generate-sbom.py b/tools/sbom/generate-sbom.py
index 192061e..56509c9 100755
--- a/tools/sbom/generate-sbom.py
+++ b/tools/sbom/generate-sbom.py
@@ -51,6 +51,38 @@
 ISSUE_INSTALLED_FILE_NOT_EXIST = 'Non-exist installed files:'
 INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
 
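+# Soong module types that mark an installed file as coming from a prebuilt
+# package.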
+SOONG_PREBUILT_MODULE_TYPES = [
+  'android_app_import',
+  'android_library_import',
+  'cc_prebuilt_binary',
+  'cc_prebuilt_library',
+  'cc_prebuilt_library_headers',
+  'cc_prebuilt_library_shared',
+  'cc_prebuilt_library_static',
+  'cc_prebuilt_object',
+  'dex_import',
+  'java_import',
+  'java_sdk_library_import',
+  'java_system_modules_import',
+  'libclang_rt_prebuilt_library_static',
+  'libclang_rt_prebuilt_library_shared',
+  'llvm_prebuilt_library_static',
+  'ndk_prebuilt_object',
+  'ndk_prebuilt_shared_stl',
+  'ndk_prebuilt_static_stl',
+  'prebuilt_apex',
+  'prebuilt_bootclasspath_fragment',
+  'prebuilt_dsp',
+  'prebuilt_firmware',
+  'prebuilt_kernel_modules',
+  'prebuilt_rfsa',
+  'prebuilt_root',
+  'rust_prebuilt_dylib',
+  'rust_prebuilt_library',
+  'rust_prebuilt_rlib',
+  'vndk_prebuilt_shared',
+]
+
 
 def get_args():
   parser = argparse.ArgumentParser()
@@ -106,35 +138,8 @@
 
 
 def is_soong_prebuilt_module(file_metadata):
-  return file_metadata['soong_module_type'] and file_metadata['soong_module_type'] in [
-      'android_app_import', 'android_library_import', 'cc_prebuilt_binary', 'cc_prebuilt_library',
-      'cc_prebuilt_library_headers', 'cc_prebuilt_library_shared', 'cc_prebuilt_library_static', 'cc_prebuilt_object',
-      'dex_import', 'java_import', 'java_sdk_library_import', 'java_system_modules_import',
-      'libclang_rt_prebuilt_library_static', 'libclang_rt_prebuilt_library_shared', 'llvm_prebuilt_library_static',
-      'ndk_prebuilt_object', 'ndk_prebuilt_shared_stl', 'nkd_prebuilt_static_stl', 'prebuilt_apex',
-      'prebuilt_bootclasspath_fragment', 'prebuilt_dsp', 'prebuilt_firmware', 'prebuilt_kernel_modules',
-      'prebuilt_rfsa', 'prebuilt_root', 'rust_prebuilt_dylib', 'rust_prebuilt_library', 'rust_prebuilt_rlib',
-      'vndk_prebuilt_shared',
-
-      # 'android_test_import',
-      # 'cc_prebuilt_test_library_shared',
-      # 'java_import_host',
-      # 'java_test_import',
-      # 'llvm_host_prebuilt_library_shared',
-      # 'prebuilt_apis',
-      # 'prebuilt_build_tool',
-      # 'prebuilt_defaults',
-      # 'prebuilt_etc',
-      # 'prebuilt_etc_host',
-      # 'prebuilt_etc_xml',
-      # 'prebuilt_font',
-      # 'prebuilt_hidl_interfaces',
-      # 'prebuilt_platform_compat_config',
-      # 'prebuilt_stubs_sources',
-      # 'prebuilt_usr_share',
-      # 'prebuilt_usr_share_host',
-      # 'soong_config_module_type_import',
-  ]
+  return (file_metadata['soong_module_type'] and
+          file_metadata['soong_module_type'] in SOONG_PREBUILT_MODULE_TYPES)
 
 
 def is_source_package(file_metadata):
@@ -397,7 +402,7 @@
                              creators=['Organization: ' + args.product_mfr])
     for installed_file_metadata in reader:
       installed_file = installed_file_metadata['installed_file']
-      if args.output_file != args.product_out_dir + installed_file + ".spdx":
+      if args.output_file != args.product_out_dir + installed_file + '.spdx.json':
         continue
 
       module_path = installed_file_metadata['module_path']
@@ -418,7 +423,10 @@
       doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
       break
 
-  with open(args.output_file, 'w', encoding="utf-8") as file:
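+  # Write the SBOM document as SPDX JSON; the tag-value output is kept as a
+  # separate fragment file.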
+  with open(args.output_file, 'w', encoding='utf-8') as file:
+    sbom_writers.JSONWriter.write(doc, file)
+  fragment_file = args.output_file.removesuffix('.spdx.json') + '-fragment.spdx'
+  with open(fragment_file, 'w', encoding='utf-8') as file:
     sbom_writers.TagValueWriter.write(doc, file, fragment=True)