Merge "Include hwasan lib and runtime on arm64 system images"
diff --git a/core/Makefile b/core/Makefile
index 65a5c51..85f33bc 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -2981,7 +2981,7 @@
 
 endif # BUILDING_DEBUG_BOOT_IMAGE || BUILDING_DEBUG_VENDOR_BOOT_IMAGE
 
-
+PARTITION_COMPAT_SYMLINKS :=
 # Creates a compatibility symlink between two partitions, e.g. /system/vendor to /vendor
 # $1: from location (e.g $(TARGET_OUT)/vendor)
 # $2: destination location (e.g. /vendor)
@@ -2999,6 +2999,7 @@
 	ln -sfn $2 $1
 $1: .KATI_SYMLINK_OUTPUTS := $1
 )
+$(eval PARTITION_COMPAT_SYMLINKS += $1)
 $1
 endef
 
@@ -3109,17 +3110,23 @@
 
 # Create symlink /system/vendor to /vendor if necessary.
 ifdef BOARD_USES_VENDORIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/vendor,/vendor,vendor.img)
+  _vendor_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/vendor,/vendor,vendor.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_vendor_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_vendor_symlink)
 endif
 
 # Create symlink /system/product to /product if necessary.
 ifdef BOARD_USES_PRODUCTIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/product,/product,product.img)
+  _product_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/product,/product,product.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_product_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_product_symlink)
 endif
 
 # Create symlink /system/system_ext to /system_ext if necessary.
 ifdef BOARD_USES_SYSTEM_EXTIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/system_ext,/system_ext,system_ext.img)
+  _systemext_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/system_ext,/system_ext,system_ext.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_systemext_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_systemext_symlink)
 endif
 
 # -----------------------------------------------------------------
@@ -3132,7 +3139,9 @@
 # - /system/lib/modules is a symlink to a directory that stores system DLKMs.
 # - The system_dlkm partition is mounted at /system_dlkm at runtime.
 ifdef BOARD_USES_SYSTEM_DLKMIMAGE
-  INTERNAL_SYSTEMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT)/lib/modules,/system_dlkm/lib/modules,system_dlkm.img)
+  _system_dlkm_lib_modules_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT)/lib/modules,/system_dlkm/lib/modules,system_dlkm.img)
+  INTERNAL_SYSTEMIMAGE_FILES += $(_system_dlkm_lib_modules_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_system_dlkm_lib_modules_symlink)
 endif
 
 FULL_SYSTEMIMAGE_DEPS := $(INTERNAL_SYSTEMIMAGE_FILES) $(INTERNAL_USERIMAGES_DEPS)
@@ -3521,7 +3530,9 @@
 
 # Create symlink /vendor/odm to /odm if necessary.
 ifdef BOARD_USES_ODMIMAGE
-  INTERNAL_VENDORIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/odm,/odm,odm.img)
+  _odm_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/odm,/odm,odm.img)
+  INTERNAL_VENDORIMAGE_FILES += $(_odm_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_odm_symlink)
 endif
 
 # Create symlinks for vendor_dlkm on devices with a vendor_dlkm partition:
@@ -3539,7 +3550,9 @@
 # The vendor DLKMs and other vendor_dlkm files must not be accessed using other paths because they
 # are not guaranteed to exist on all devices.
 ifdef BOARD_USES_VENDOR_DLKMIMAGE
-  INTERNAL_VENDORIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/lib/modules,/vendor_dlkm/lib/modules,vendor_dlkm.img)
+  _vendor_dlkm_lib_modules_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT_VENDOR)/lib/modules,/vendor_dlkm/lib/modules,vendor_dlkm.img)
+  INTERNAL_VENDORIMAGE_FILES += $(_vendor_dlkm_lib_modules_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_vendor_dlkm_lib_modules_symlink)
 endif
 
 # Install vendor/etc/linker.config.pb with PRODUCT_VENDOR_LINKER_CONFIG_FRAGMENTS and STUB_LIBRARIES
@@ -3760,7 +3773,9 @@
 # The odm DLKMs and other odm_dlkm files must not be accessed using other paths because they
 # are not guaranteed to exist on all devices.
 ifdef BOARD_USES_ODM_DLKMIMAGE
-  INTERNAL_ODMIMAGE_FILES += $(call create-partition-compat-symlink,$(TARGET_OUT_ODM)/lib/modules,/odm_dlkm/lib/modules,odm_dlkm.img)
+  _odm_dlkm_lib_modules_symlink := $(call create-partition-compat-symlink,$(TARGET_OUT_ODM)/lib/modules,/odm_dlkm/lib/modules,odm_dlkm.img)
+  INTERNAL_ODMIMAGE_FILES += $(_odm_dlkm_lib_modules_symlink)
+  ALL_DEFAULT_INSTALLED_MODULES += $(_odm_dlkm_lib_modules_symlink)
 endif
 
 INSTALLED_FILES_FILE_ODM := $(PRODUCT_OUT)/installed-files-odm.txt
diff --git a/core/board_config_wifi.mk b/core/board_config_wifi.mk
index a736099..8289bf2 100644
--- a/core/board_config_wifi.mk
+++ b/core/board_config_wifi.mk
@@ -78,3 +78,6 @@
 ifdef WIFI_SKIP_STATE_TOGGLE_OFF_ON_FOR_NAN
     $(call soong_config_set,wifi,wifi_skip_state_toggle_off_on_for_nan,true)
 endif
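+# Propagate TARGET_USES_AOSP_FOR_WLAN into the "wifi" Soong config namespace.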
+ifeq ($(strip $(TARGET_USES_AOSP_FOR_WLAN)),true)
+    $(call soong_config_set,wifi,target_uses_aosp_for_wlan,true)
+endif
\ No newline at end of file
diff --git a/core/config.mk b/core/config.mk
index 91c8bd8..26e90ef 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -863,7 +863,6 @@
 
 # A list of SEPolicy versions, besides PLATFORM_SEPOLICY_VERSION, that the framework supports.
 PLATFORM_SEPOLICY_COMPAT_VERSIONS := \
-    28.0 \
     29.0 \
     30.0 \
     31.0 \
diff --git a/core/device.mk b/core/device.mk
deleted file mode 100644
index 20ff447..0000000
--- a/core/device.mk
+++ /dev/null
@@ -1,76 +0,0 @@
-#
-# Copyright (C) 2007 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-_device_var_list := \
-    DEVICE_NAME \
-    DEVICE_BOARD \
-    DEVICE_REGION
-
-define dump-device
-$(info ==== $(1) ====)\
-$(foreach v,$(_device_var_list),\
-$(info DEVICES.$(1).$(v) := $(DEVICES.$(1).$(v))))\
-$(info --------)
-endef
-
-define dump-devices
-$(foreach p,$(DEVICES),$(call dump-device,$(p)))
-endef
-
-#
-# $(1): device to inherit
-#
-define inherit-device
-  $(foreach v,$(_device_var_list), \
-      $(eval $(v) := $($(v)) $(INHERIT_TAG)$(strip $(1))))
-endef
-
-#
-# $(1): device makefile list
-#
-#TODO: check to make sure that devices have all the necessary vars defined
-define import-devices
-$(call import-nodes,DEVICES,$(1),$(_device_var_list))
-endef
-
-
-#
-# $(1): short device name like "sooner"
-#
-define _resolve-short-device-name
-  $(eval dn := $(strip $(1)))
-  $(eval d := \
-      $(foreach d,$(DEVICES), \
-          $(if $(filter $(dn),$(DEVICES.$(d).DEVICE_NAME)), \
-            $(d) \
-       )) \
-   )
-  $(eval d := $(sort $(d)))
-  $(if $(filter 1,$(words $(d))), \
-    $(d), \
-    $(if $(filter 0,$(words $(d))), \
-      $(error No matches for device "$(dn)"), \
-      $(error Device "$(dn)" ambiguous: matches $(d)) \
-    ) \
-  )
-endef
-
-#
-# $(1): short device name like "sooner"
-#
-define resolve-short-device-name
-$(strip $(call _resolve-short-device-name,$(1)))
-endef
diff --git a/core/main.mk b/core/main.mk
index f0a2483..e84dfaa 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -2179,7 +2179,8 @@
 	  $(eval _is_kernel_modules_blocklist := $(if $(findstring $f,$(ALL_KERNEL_MODULES_BLOCKLIST)),Y)) \
 	  $(eval _is_fsverity_build_manifest_apk := $(if $(findstring $f,$(ALL_FSVERITY_BUILD_MANIFEST_APK)),Y)) \
 	  $(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \
-	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)) \
+	  $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \
+	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)) \
 	  @echo /$(_path_on_device)$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated) >> $@ $(newline) \
 	  $(if $(_post_installed_dexpreopt_zip), \
 	  for i in $$(zipinfo -1 $(_post_installed_dexpreopt_zip)); do echo /$$i$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated) >> $@ ; done $(newline) \
@@ -2194,6 +2195,7 @@
 	rm -rf $@
 	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --product_out_dir=$(PRODUCT_OUT) --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr="$(PRODUCT_MANUFACTURER)" --json
 
+$(call dist-for-goals,droid,$(PRODUCT_OUT)/sbom.spdx.json:sbom/sbom.spdx.json)
 else
 apps_only_sbom_files := $(sort $(patsubst %,%.spdx,$(apps_only_installed_files)))
 $(apps_only_sbom_files): $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
diff --git a/core/product_config.mk b/core/product_config.mk
index 7055a1e..1ef8890 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -74,7 +74,7 @@
 ###########################################################
 
 define find-copy-subdir-files
-$(sort $(shell find $(2) -name "$(1)" -type f | $(SED_EXTENDED) "s:($(2)/?(.*)):\\1\\:$(3)/\\2:" | sed "s://:/:g"))
+$(shell find $(2) -name "$(1)" -type f | $(SED_EXTENDED) "s:($(2)/?(.*)):\\1\\:$(3)/\\2:" | sed "s://:/:g" | sort)
 endef
 
 #
@@ -144,7 +144,6 @@
 #
 include $(BUILD_SYSTEM)/node_fns.mk
 include $(BUILD_SYSTEM)/product.mk
-include $(BUILD_SYSTEM)/device.mk
 
 # Read all product definitions.
 #
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 95b3d37..b160648 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -104,7 +104,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-    PLATFORM_SECURITY_PATCH := 2023-03-05
+    PLATFORM_SECURITY_PATCH := 2023-04-05
 endif
 
 include $(BUILD_SYSTEM)/version_util.mk
diff --git a/target/product/aosp_64bitonly_x86_64.mk b/target/product/aosp_64bitonly_x86_64.mk
index 4de4e0c..b8ca3aa 100644
--- a/target/product/aosp_64bitonly_x86_64.mk
+++ b/target/product/aosp_64bitonly_x86_64.mk
@@ -51,7 +51,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
 
diff --git a/target/product/aosp_riscv64.mk b/target/product/aosp_riscv64.mk
index bc35b95..0e5d9fe 100644
--- a/target/product/aosp_riscv64.mk
+++ b/target/product/aosp_riscv64.mk
@@ -46,7 +46,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/riscv64-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_riscv64/device.mk)
 
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 7db2c0f..669874e 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -47,7 +47,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
 
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index d55866f..deaa3b1 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -56,7 +56,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/non_ab_device.mk)
diff --git a/target/product/aosp_x86_arm.mk b/target/product/aosp_x86_arm.mk
index f96e068..39ad0d8 100644
--- a/target/product/aosp_x86_arm.mk
+++ b/target/product/aosp_x86_arm.mk
@@ -45,7 +45,6 @@
 #
 # All components inherited here go to vendor image
 #
-$(call inherit-product-if-exists, device/generic/goldfish/x86-vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_arm/device.mk)
 
diff --git a/tools/Android.bp b/tools/Android.bp
index e325f6b..bea0602 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -71,22 +71,6 @@
 }
 
 python_binary_host {
-    name: "generate-sbom",
-    srcs: [
-        "generate-sbom.py",
-    ],
-    version: {
-        py3: {
-            embedded_launcher: true,
-        },
-    },
-    libs: [
-        "metadata_file_proto_py",
-        "libprotobuf-python",
-    ],
-}
-
-python_binary_host {
     name: "list_files",
     main: "list_files.py",
     srcs: [
diff --git a/tools/compliance/Android.bp b/tools/compliance/Android.bp
index f85a46f..ef5c760 100644
--- a/tools/compliance/Android.bp
+++ b/tools/compliance/Android.bp
@@ -142,6 +142,7 @@
         "spdx-tools-builder2v2",
         "spdx-tools-spdxcommon",
         "spdx-tools-spdx-json",
+        "spdx-tools-spdxlib",
     ],
     testSrcs: ["cmd/sbom/sbom_test.go"],
 }
diff --git a/tools/compliance/cmd/sbom/sbom.go b/tools/compliance/cmd/sbom/sbom.go
index 3cdfa0a..c378e39 100644
--- a/tools/compliance/cmd/sbom/sbom.go
+++ b/tools/compliance/cmd/sbom/sbom.go
@@ -38,6 +38,7 @@
 	"github.com/spdx/tools-golang/json"
 	"github.com/spdx/tools-golang/spdx/common"
 	spdx "github.com/spdx/tools-golang/spdx/v2_2"
+	"github.com/spdx/tools-golang/spdxlib"
 )
 
 var (
@@ -173,6 +174,7 @@
 		os.Exit(1)
 	}
 
+	// Write the generated SPDX document to the output file.
 	if err := spdx_json.Save2_2(spdxDoc, ofile); err != nil {
 		fmt.Fprintf(os.Stderr, "failed to write document to %v: %v", *outputFile, err)
 		os.Exit(1)
@@ -516,7 +518,7 @@
 
 	ci.Created = ctx.creationTime()
 
-	return &spdx.Document{
+	doc := &spdx.Document{
 		SPDXVersion:       "SPDX-2.2",
 		DataLicense:       "CC0-1.0",
 		SPDXIdentifier:    "DOCUMENT",
@@ -526,5 +528,11 @@
 		Packages:          pkgs,
 		Relationships:     relationships,
 		OtherLicenses:     otherLicenses,
-	}, deps, nil
+	}
+
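+	// Validate the assembled document before returning it so malformed SBOMs fail fast.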
+	if err := spdxlib.ValidateDocument2_2(doc); err != nil {
+		return nil, nil, fmt.Errorf("unable to validate the SPDX doc: %v", err)
+	}
+
+	return doc, deps, nil
 }
diff --git a/tools/compliance/cmd/sbom/sbom_test.go b/tools/compliance/cmd/sbom/sbom_test.go
index 65a2df1..6472f51 100644
--- a/tools/compliance/cmd/sbom/sbom_test.go
+++ b/tools/compliance/cmd/sbom/sbom_test.go
@@ -2226,6 +2226,10 @@
 				t.Errorf("sbom: gotStderr = %v, want none", stderr)
 			}
 
+			if err := validate(spdxDoc); err != nil {
+				t.Fatalf("sbom: document fails to validate: %v", err)
+			}
+
 			gotData, err := json.Marshal(spdxDoc)
 			if err != nil {
 				t.Fatalf("sbom: failed to marshal spdx doc: %v", err)
@@ -2267,6 +2271,36 @@
 	return ci
 }
 
+// validate returns an error if the Document is found to be invalid
+func validate(doc *spdx.Document) error {
+	if doc.SPDXVersion == "" {
+		return fmt.Errorf("SPDXVersion: got nothing, want spdx version")
+	}
+	if doc.DataLicense == "" {
+		return fmt.Errorf("DataLicense: got nothing, want Data License")
+	}
+	if doc.SPDXIdentifier == "" {
+		return fmt.Errorf("SPDXIdentifier: got nothing, want SPDX Identifier")
+	}
+	if doc.DocumentName == "" {
+		return fmt.Errorf("DocumentName: got nothing, want Document Name")
+	}
+	if got := fmt.Sprintf("%v", doc.CreationInfo.Creators[1].Creator); got != "Google LLC" {
+		return fmt.Errorf("Creator: got %v, want 'Google LLC'", got)
+	}
+	_, err := time.Parse(time.RFC3339, doc.CreationInfo.Created)
+	if err != nil {
+		return fmt.Errorf("Invalid time spec: %q: got error %q, want no error", doc.CreationInfo.Created, err)
+	}
+
+	for _, license := range doc.OtherLicenses {
+		if license.ExtractedText == "" {
+			return fmt.Errorf("License file: %q: got nothing, want license text", license.LicenseName)
+		}
+	}
+	return nil
+}
+
 // compareSpdxDocs deep-compares two spdx docs by going through the info section, packages, relationships and licenses
 func compareSpdxDocs(t *testing.T, actual, expected *spdx.Document) {
 
diff --git a/tools/finalization/README.md b/tools/finalization/README.md
new file mode 100644
index 0000000..501f260
--- /dev/null
+++ b/tools/finalization/README.md
@@ -0,0 +1,22 @@
+# Finalization tools
+This folder contains automation and CI scripts for [finalizing](https://go/android-finalization) Android before release.
+
+## Automation:
+1. [Environment setup](./environment.sh). Set values for various finalization constants.
+2. [Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh). Prepare the branch for the SDK release. The SDK contains the Android Java APIs and other stable APIs. Commonly referred to as the 1st step.
+3. [Finalize Android](./finalize-sdk-rel.sh). Mark the branch as "REL", i.e. prepare it for an Android release. Any signed build containing these changes will be considered an official Android release. Referred to as the 2nd finalization step.
+4. [Finalize SDK and submit](./step-1.sh). Do the [Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) step, create CLs, organize them into a topic and send them to Gerrit.
+  a. [Update SDK and submit](./update-step-1.sh). Same as above, but updates the existing CLs.
+5. [Finalize Android and submit](./step-2.sh). Do the [Finalize Android](./finalize-sdk-rel.sh) step, create CLs, organize them into a topic and send them to Gerrit.
+  a. [Update Android and submit](./update-step-2.sh). Same as above, but updates the existing CLs. A usage sketch for the submit scripts follows this list.
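+
+A typical local run of the submit automation, assuming it is invoked from the root of a full repo checkout (a sketch; each script sources [environment.sh](./environment.sh) for its configuration):
+
+```sh
+# 1st step: finalize the SDK and upload the CLs to Gerrit.
+./build/make/tools/finalization/step-1.sh
+# 2nd step: switch the branch to REL and upload the CLs.
+./build/make/tools/finalization/step-2.sh
+```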
+
+## CI:
+Performed by build targets in the finalization branches.
+1. [Finalization Step 1 for Main, git_main-fina-1-release](https://android-build.googleplex.com/builds/branches/git_main-fina-1-release/grid). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh).
+2. [Finalization Step 1 for UDC, git_udc-fina-1-release](https://android-build.googleplex.com/builds/branches/git_udc-fina-1-release/grid). Same but for udc-dev.
+3. [Finalization Step 2 for Main, git_main-fina-2-release](https://android-build.googleplex.com/builds/branches/git_main-fina-2-release/grid). Test [1st step/Finalize SDK](./finalize-aidl-vndk-sdk-resources.sh) and [2nd step/Finalize Android](./finalize-sdk-rel.sh). Use [local finalization](./localonly-steps.sh) to build and copy presubmits.
+4. [Finalization Step 2 for UDC, git_udc-fina-2-release](https://android-build.googleplex.com/builds/branches/git_udc-fina-2-release/grid). Same but for udc-dev.
+5. [Local finalization steps](./localonly-steps.sh) are done only during local testing or in the CI lab. Normally these steps use artifacts from other builds.
+
+## Utility:
+[Full cleanup](./cleanup.sh). Remove all local changes and switch each project into a detached, headless state. This is the best state from which to sync/rebase/finalize the branch.
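+
+A hypothetical sketch of what the full cleanup amounts to (the authoritative commands live in [cleanup.sh](./cleanup.sh)):
+
+```sh
+# Drop all local changes in every project...
+repo forall -c 'git reset --hard; git clean -fdx'
+# ...then detach each project back to the manifest revision.
+repo sync --detach
+```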
diff --git a/tools/finalization/build-step-1-and-2.sh b/tools/finalization/build-step-1-and-2.sh
index 1b749b1..84e2782 100755
--- a/tools/finalization/build-step-1-and-2.sh
+++ b/tools/finalization/build-step-1-and-2.sh
@@ -6,14 +6,18 @@
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
-    # SDK codename -> int
-    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+    if [ "$FINAL_STATE" = "unfinalized" ] ; then
+        # SDK codename -> int
+        source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+    fi;
 
-    # Platform/Mainline SDKs build and move to prebuilts
-    source $top/build/make/tools/finalization/localonly-finalize-mainline-sdk.sh
+    if [ "$FINAL_STATE" = "unfinalized" ] || [ "$FINAL_STATE" = "sdk" ] ; then
+        # ADB, Platform/Mainline SDKs build and move to prebuilts
+        source $top/build/make/tools/finalization/localonly-steps.sh
 
-    # REL
-    source $top/build/make/tools/finalization/finalize-sdk-rel.sh
+        # REL
+        source $top/build/make/tools/finalization/finalize-sdk-rel.sh
+    fi;
 }
 
 finalize_main_step12
diff --git a/tools/finalization/build-step-1.sh b/tools/finalization/build-step-1.sh
index a8d590f..3c618fe 100755
--- a/tools/finalization/build-step-1.sh
+++ b/tools/finalization/build-step-1.sh
@@ -6,8 +6,11 @@
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
-    # Build finalization artifacts.
-    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+    if [ "$FINAL_STATE" = "unfinalized" ] ; then
+        # Build finalization artifacts.
+        # source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+        echo "Build finalization artifacts."
+    fi;
 }
 
 finalize_main_step1
diff --git a/tools/finalization/environment.sh b/tools/finalization/environment.sh
index 14951b8..8c838aa 100755
--- a/tools/finalization/environment.sh
+++ b/tools/finalization/environment.sh
@@ -12,4 +12,10 @@
 
 export FINAL_BUILD_PREFIX='UP1A'
 
-export FINAL_MAINLINE_EXTENSION='7'
\ No newline at end of file
+export FINAL_MAINLINE_EXTENSION='7'
+
+# Options:
+# 'unfinalized' - branch is in development state,
+# 'sdk' - SDK/API is finalized
+# 'rel' - branch is finalized, switched to REL
+export FINAL_STATE='unfinalized'
diff --git a/tools/finalization/localonly-finalize-mainline-sdk.sh b/tools/finalization/localonly-steps.sh
similarity index 93%
rename from tools/finalization/localonly-finalize-mainline-sdk.sh
rename to tools/finalization/localonly-steps.sh
index 2b77c5d..6107b3e 100755
--- a/tools/finalization/localonly-finalize-mainline-sdk.sh
+++ b/tools/finalization/localonly-steps.sh
@@ -2,7 +2,7 @@
 
 set -ex
 
-function finalize_locally_mainline_sdk() {
+function finalize_locally() {
     local top="$(dirname "$0")"/../../../..
     source $top/build/make/tools/finalization/environment.sh
 
@@ -23,5 +23,4 @@
     "$top/prebuilts/build-tools/path/linux-x86/python3" -W ignore::DeprecationWarning "$top/prebuilts/sdk/update_prebuilts.py" --local_mode -f ${FINAL_PLATFORM_SDK_VERSION} -e ${FINAL_MAINLINE_EXTENSION} --bug 1 1
 }
 
-finalize_locally_mainline_sdk
-
+finalize_locally
diff --git a/tools/finalization/step-1.sh b/tools/finalization/step-1.sh
index cf21e45..0dd4b3a 100755
--- a/tools/finalization/step-1.sh
+++ b/tools/finalization/step-1.sh
@@ -9,7 +9,7 @@
         if [[ $(git status --short) ]]; then
             repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization" ;
             git add -A . ;
-            git commit -m "$FINAL_PLATFORM_CODENAME is now $FINAL_PLATFORM_SDK_VERSION" \
+            git commit -m "$FINAL_PLATFORM_CODENAME is now $FINAL_PLATFORM_SDK_VERSION and extension version $FINAL_MAINLINE_EXTENSION" \
                        -m "Ignore-AOSP-First: $FINAL_PLATFORM_CODENAME Finalization
 Bug: $FINAL_BUG_ID
 Test: build";
diff --git a/tools/finalization/update-step-1.sh b/tools/finalization/update-step-1.sh
new file mode 100755
index 0000000..b469988
--- /dev/null
+++ b/tools/finalization/update-step-1.sh
@@ -0,0 +1,39 @@
+#!/bin/bash
+# Script to perform a 1st step of Android Finalization: API/SDK finalization, update CLs and upload to Gerrit.
+
+# WIP, does not work yet
+exit 10
+
+set -ex
+
+function update_step_1_changes() {
+    set +e
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            git stash -u ;
+            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization" ;
+            git stash pop ;
+            git add -A . ;
+            git commit --amend --no-edit ;
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function update_step_1_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # vndk etc finalization
+    source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+
+    # update existing CLs and upload to gerrit
+    update_step_1_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+update_step_1_main
diff --git a/tools/finalization/update-step-2.sh b/tools/finalization/update-step-2.sh
new file mode 100755
index 0000000..d2b8592
--- /dev/null
+++ b/tools/finalization/update-step-2.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Script to perform a 2nd step of Android Finalization: REL finalization, create CLs and upload to Gerrit.
+
+# WIP, does not work yet
+exit 10
+
+set -ex
+
+function update_step_2_changes() {
+    set +e
+    repo forall -c '\
+        if [[ $(git status --short) ]]; then
+            git stash -u ;
+            repo start "$FINAL_PLATFORM_CODENAME-SDK-Finalization-Rel" ;
+            git stash pop ;
+            git add -A . ;
+            git commit --amend --no-edit ;
+            repo upload --cbr --no-verify -o nokeycheck -t -y . ;
+        fi'
+}
+
+function update_step_2_main() {
+    local top="$(dirname "$0")"/../../../..
+    source $top/build/make/tools/finalization/environment.sh
+
+    local m="$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=aosp_arm64 TARGET_BUILD_VARIANT=userdebug"
+
+    # prebuilts etc
+    source $top/build/make/tools/finalization/finalize-sdk-rel.sh
+
+    # move all changes to finalization branch/topic and upload to gerrit
+    update_step_2_changes
+
+    # build to confirm everything is OK
+    AIDL_FROZEN_REL=true $m
+}
+
+update_step_2_main
diff --git a/tools/generate-sbom.py b/tools/generate-sbom.py
deleted file mode 100755
index eae7945..0000000
--- a/tools/generate-sbom.py
+++ /dev/null
@@ -1,684 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Generate the SBOM of the current target product in SPDX format.
-Usage example:
-  generate-sbom.py --output_file out/target/product/vsoc_x86_64/sbom.spdx \
-                   --metadata out/target/product/vsoc_x86_64/sbom-metadata.csv \
-                   --product_out_dir=out/target/product/vsoc_x86_64 \
-                   --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
-                   --product_mfr=Google
-"""
-
-import argparse
-import csv
-import datetime
-import google.protobuf.text_format as text_format
-import hashlib
-import json
-import os
-import metadata_file_pb2
-
-# Common
-SPDXID = 'SPDXID'
-SPDX_VERSION = 'SPDXVersion'
-DATA_LICENSE = 'DataLicense'
-DOCUMENT_NAME = 'DocumentName'
-DOCUMENT_NAMESPACE = 'DocumentNamespace'
-CREATED = 'Created'
-CREATOR = 'Creator'
-EXTERNAL_DOCUMENT_REF = 'ExternalDocumentRef'
-
-# Package
-PACKAGE_NAME = 'PackageName'
-PACKAGE_DOWNLOAD_LOCATION = 'PackageDownloadLocation'
-PACKAGE_VERSION = 'PackageVersion'
-PACKAGE_SUPPLIER = 'PackageSupplier'
-FILES_ANALYZED = 'FilesAnalyzed'
-PACKAGE_VERIFICATION_CODE = 'PackageVerificationCode'
-PACKAGE_EXTERNAL_REF = 'ExternalRef'
-# Package license
-PACKAGE_LICENSE_CONCLUDED = 'PackageLicenseConcluded'
-PACKAGE_LICENSE_INFO_FROM_FILES = 'PackageLicenseInfoFromFiles'
-PACKAGE_LICENSE_DECLARED = 'PackageLicenseDeclared'
-PACKAGE_LICENSE_COMMENTS = 'PackageLicenseComments'
-
-# File
-FILE_NAME = 'FileName'
-FILE_CHECKSUM = 'FileChecksum'
-# File license
-FILE_LICENSE_CONCLUDED = 'LicenseConcluded'
-FILE_LICENSE_INFO_IN_FILE = 'LicenseInfoInFile'
-FILE_LICENSE_COMMENTS = 'LicenseComments'
-FILE_COPYRIGHT_TEXT = 'FileCopyrightText'
-FILE_NOTICE = 'FileNotice'
-FILE_ATTRIBUTION_TEXT = 'FileAttributionText'
-
-# Relationship
-RELATIONSHIP = 'Relationship'
-REL_DESCRIBES = 'DESCRIBES'
-REL_VARIANT_OF = 'VARIANT_OF'
-REL_GENERATED_FROM = 'GENERATED_FROM'
-
-# Package type
-PKG_SOURCE = 'SOURCE'
-PKG_UPSTREAM = 'UPSTREAM'
-PKG_PREBUILT = 'PREBUILT'
-
-# Security tag
-NVD_CPE23 = 'NVD-CPE2.3:'
-
-# Report
-ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
-ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
-ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
-ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
-INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
-
-
-def get_args():
-  parser = argparse.ArgumentParser()
-  parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
-  parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
-  parser.add_argument('--metadata', required=True, help='The SBOM metadata file path.')
-  parser.add_argument('--product_out_dir', required=True, help='The parent directory of all the installed files.')
-  parser.add_argument('--build_version', required=True, help='The build version.')
-  parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
-  parser.add_argument('--json', action='store_true', default=False, help='Generated SBOM file in SPDX JSON format')
-  parser.add_argument('--unbundled', action='store_true', default=False, help='Generate SBOM file for unbundled module')
-
-  return parser.parse_args()
-
-
-def log(*info):
-  if args.verbose:
-    for i in info:
-      print(i)
-
-
-def new_doc_header(doc_id):
-  return {
-      SPDX_VERSION: 'SPDX-2.3',
-      DATA_LICENSE: 'CC0-1.0',
-      SPDXID: doc_id,
-      DOCUMENT_NAME: args.build_version,
-      DOCUMENT_NAMESPACE: f'https://www.google.com/sbom/spdx/android/{args.build_version}',
-      CREATOR: 'Organization: Google, LLC',
-      CREATED: '<timestamp>',
-      EXTERNAL_DOCUMENT_REF: [],
-  }
-
-
-def new_package_record(id, name, version, supplier, download_location=None, files_analyzed='false', external_refs=[]):
-  package = {
-      PACKAGE_NAME: name,
-      SPDXID: id,
-      PACKAGE_DOWNLOAD_LOCATION: download_location if download_location else 'NONE',
-      FILES_ANALYZED: files_analyzed,
-  }
-  if version:
-    package[PACKAGE_VERSION] = version
-  if supplier:
-    package[PACKAGE_SUPPLIER] = f'Organization: {supplier}'
-  if external_refs:
-    package[PACKAGE_EXTERNAL_REF] = external_refs
-
-  return package
-
-
-def new_file_record(id, name, checksum):
-  return {
-      FILE_NAME: name,
-      SPDXID: id,
-      FILE_CHECKSUM: checksum
-  }
-
-
-def encode_for_spdxid(s):
-  """Simple encode for string values used in SPDXID which uses the charset of A-Za-Z0-9.-"""
-  result = ''
-  for c in s:
-    if c.isalnum() or c in '.-':
-      result += c
-    elif c in '_@/':
-      result += '-'
-    else:
-      result += '0x' + c.encode('utf-8').hex()
-
-  return result.lstrip('-')
-
-
-def new_package_id(package_name, type):
-  return f'SPDXRef-{type}-{encode_for_spdxid(package_name)}'
-
-
-def new_external_doc_ref(package_name, sbom_url, sbom_checksum):
-  doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{encode_for_spdxid(package_name)}'
-  return f'{EXTERNAL_DOCUMENT_REF}: {doc_ref_id} {sbom_url} {sbom_checksum}', doc_ref_id
-
-
-def new_file_id(file_path):
-  return f'SPDXRef-{encode_for_spdxid(file_path)}'
-
-
-def new_relationship_record(id1, relationship, id2):
-  return f'{RELATIONSHIP}: {id1} {relationship} {id2}'
-
-
-def checksum(file_path):
-  file_path = args.product_out_dir + '/' + file_path
-  h = hashlib.sha1()
-  if os.path.islink(file_path):
-    h.update(os.readlink(file_path).encode('utf-8'))
-  else:
-    with open(file_path, 'rb') as f:
-      h.update(f.read())
-  return f'SHA1: {h.hexdigest()}'
-
-
-def is_soong_prebuilt_module(file_metadata):
-  return file_metadata['soong_module_type'] and file_metadata['soong_module_type'] in [
-      'android_app_import', 'android_library_import', 'cc_prebuilt_binary', 'cc_prebuilt_library',
-      'cc_prebuilt_library_headers', 'cc_prebuilt_library_shared', 'cc_prebuilt_library_static', 'cc_prebuilt_object',
-      'dex_import', 'java_import', 'java_sdk_library_import', 'java_system_modules_import',
-      'libclang_rt_prebuilt_library_static', 'libclang_rt_prebuilt_library_shared', 'llvm_prebuilt_library_static',
-      'ndk_prebuilt_object', 'ndk_prebuilt_shared_stl', 'nkd_prebuilt_static_stl', 'prebuilt_apex',
-      'prebuilt_bootclasspath_fragment', 'prebuilt_dsp', 'prebuilt_firmware', 'prebuilt_kernel_modules',
-      'prebuilt_rfsa', 'prebuilt_root', 'rust_prebuilt_dylib', 'rust_prebuilt_library', 'rust_prebuilt_rlib',
-      'vndk_prebuilt_shared',
-
-      # 'android_test_import',
-      # 'cc_prebuilt_test_library_shared',
-      # 'java_import_host',
-      # 'java_test_import',
-      # 'llvm_host_prebuilt_library_shared',
-      # 'prebuilt_apis',
-      # 'prebuilt_build_tool',
-      # 'prebuilt_defaults',
-      # 'prebuilt_etc',
-      # 'prebuilt_etc_host',
-      # 'prebuilt_etc_xml',
-      # 'prebuilt_font',
-      # 'prebuilt_hidl_interfaces',
-      # 'prebuilt_platform_compat_config',
-      # 'prebuilt_stubs_sources',
-      # 'prebuilt_usr_share',
-      # 'prebuilt_usr_share_host',
-      # 'soong_config_module_type_import',
-  ]
-
-
-def is_source_package(file_metadata):
-  module_path = file_metadata['module_path']
-  return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)
-
-
-def is_prebuilt_package(file_metadata):
-  module_path = file_metadata['module_path']
-  if module_path:
-    return (module_path.startswith('prebuilts/') or
-            is_soong_prebuilt_module(file_metadata) or
-            file_metadata['is_prebuilt_make_module'])
-
-  kernel_module_copy_files = file_metadata['kernel_module_copy_files']
-  if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
-    return True
-
-  return False
-
-
-def get_source_package_info(file_metadata, metadata_file_path):
-  if not metadata_file_path:
-    return file_metadata['module_path'], []
-
-  metadata_proto = metadata_file_protos[metadata_file_path]
-  external_refs = []
-  for tag in metadata_proto.third_party.security.tag:
-    if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
-      external_refs.append(f'{PACKAGE_EXTERNAL_REF}: SECURITY cpe23Type {tag.removeprefix(NVD_CPE23)}')
-    elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
-      external_refs.append(f'{PACKAGE_EXTERNAL_REF}: SECURITY cpe22Type {tag.removeprefix(NVD_CPE23)}')
-
-  if metadata_proto.name:
-    return metadata_proto.name, external_refs
-  else:
-    return os.path.basename(metadata_file_path), external_refs  # return the directory name only as package name
-
-
-def get_prebuilt_package_name(file_metadata, metadata_file_path):
-  name = None
-  if metadata_file_path:
-    metadata_proto = metadata_file_protos[metadata_file_path]
-    if metadata_proto.name:
-      name = metadata_proto.name
-    else:
-      name = metadata_file_path
-  elif file_metadata['module_path']:
-    name = file_metadata['module_path']
-  elif file_metadata['kernel_module_copy_files']:
-    src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
-    name = os.path.dirname(src_path)
-
-  return name.removeprefix('prebuilts/').replace('/', '-')
-
-
-def get_metadata_file_path(file_metadata):
-  metadata_path = ''
-  if file_metadata['module_path']:
-    metadata_path = file_metadata['module_path']
-  elif file_metadata['kernel_module_copy_files']:
-    metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])
-
-  while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
-    metadata_path = os.path.dirname(metadata_path)
-
-  return metadata_path
-
-
-def get_package_version(metadata_file_path):
-  if not metadata_file_path:
-    return None
-  metadata_proto = metadata_file_protos[metadata_file_path]
-  return metadata_proto.third_party.version
-
-
-def get_package_homepage(metadata_file_path):
-  if not metadata_file_path:
-    return None
-  metadata_proto = metadata_file_protos[metadata_file_path]
-  if metadata_proto.third_party.homepage:
-    return metadata_proto.third_party.homepage
-  for url in metadata_proto.third_party.url:
-    if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
-      return url.value
-
-  return None
-
-
-def get_package_download_location(metadata_file_path):
-  if not metadata_file_path:
-    return None
-  metadata_proto = metadata_file_protos[metadata_file_path]
-  if metadata_proto.third_party.url:
-    urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
-    if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
-      return urls[0].value
-    elif len(urls) > 1:
-      return urls[1].value
-
-  return None
-
-
-def get_sbom_fragments(installed_file_metadata, metadata_file_path):
-  external_doc_ref = None
-  packages = []
-  relationships = []
-
-  # Info from METADATA file
-  homepage = get_package_homepage(metadata_file_path)
-  version = get_package_version(metadata_file_path)
-  download_location = get_package_download_location(metadata_file_path)
-
-  if is_source_package(installed_file_metadata):
-    # Source fork packages
-    name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
-    source_package_id = new_package_id(name, PKG_SOURCE)
-    source_package = new_package_record(source_package_id, name, args.build_version, args.product_mfr,
-                                        external_refs=external_refs)
-
-    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
-    upstream_package = new_package_record(upstream_package_id, name, version, homepage, download_location)
-    packages += [source_package, upstream_package]
-    relationships.append(new_relationship_record(source_package_id, REL_VARIANT_OF, upstream_package_id))
-  elif is_prebuilt_package(installed_file_metadata):
-    # Prebuilt fork packages
-    name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
-    prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
-    prebuilt_package = new_package_record(prebuilt_package_id, name, args.build_version, args.product_mfr)
-    packages.append(prebuilt_package)
-
-    if metadata_file_path:
-      metadata_proto = metadata_file_protos[metadata_file_path]
-      if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
-        sbom_url = metadata_proto.third_party.sbom_ref.url
-        sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
-        upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
-        if sbom_url and sbom_checksum and upstream_element_id:
-          external_doc_ref, doc_ref_id = new_external_doc_ref(name, sbom_url, sbom_checksum)
-          relationships.append(
-              new_relationship_record(prebuilt_package_id, REL_VARIANT_OF, doc_ref_id + ':' + upstream_element_id))
-
-  return external_doc_ref, packages, relationships
-
-
-def generate_package_verification_code(files):
-  checksums = [file[FILE_CHECKSUM] for file in files]
-  checksums.sort()
-  h = hashlib.sha1()
-  h.update(''.join(checksums).encode(encoding='utf-8'))
-  return h.hexdigest()
-
-
-def write_record(f, record):
-  if record.__class__.__name__ == 'dict':
-    for k, v in record.items():
-      if k == EXTERNAL_DOCUMENT_REF or k == PACKAGE_EXTERNAL_REF:
-        for ref in v:
-          f.write(ref + '\n')
-      else:
-        f.write('{}: {}\n'.format(k, v))
-  elif record.__class__.__name__ == 'str':
-    f.write(record + '\n')
-  f.write('\n')
-
-
-def write_tagvalue_sbom(all_records):
-  with open(args.output_file, 'w', encoding="utf-8") as output_file:
-    for rec in all_records:
-      write_record(output_file, rec)
-
-
-def write_json_sbom(all_records, product_package_id):
-  doc = {}
-  product_package = None
-  for r in all_records:
-    if r.__class__.__name__ == 'dict':
-      if DOCUMENT_NAME in r:  # Doc header
-        doc['spdxVersion'] = r[SPDX_VERSION]
-        doc['dataLicense'] = r[DATA_LICENSE]
-        doc[SPDXID] = r[SPDXID]
-        doc['name'] = r[DOCUMENT_NAME]
-        doc['documentNamespace'] = r[DOCUMENT_NAMESPACE]
-        doc['creationInfo'] = {
-            'creators': [r[CREATOR]],
-            'created': r[CREATED],
-        }
-        doc['externalDocumentRefs'] = []
-        for ref in r[EXTERNAL_DOCUMENT_REF]:
-          # ref is 'ExternalDocumentRef: <doc id> <doc url> SHA1: xxxxx'
-          fields = ref.split(' ')
-          doc_ref = {
-              'externalDocumentId': fields[1],
-              'spdxDocument': fields[2],
-              'checksum': {
-                  'algorithm': fields[3][:-1],
-                  'checksumValue': fields[4]
-              }
-          }
-          doc['externalDocumentRefs'].append(doc_ref)
-        doc['documentDescribes'] = []
-        doc['packages'] = []
-        doc['files'] = []
-        doc['relationships'] = []
-
-      elif PACKAGE_NAME in r:  # packages
-        package = {
-            'name': r[PACKAGE_NAME],
-            SPDXID: r[SPDXID],
-            'downloadLocation': r[PACKAGE_DOWNLOAD_LOCATION],
-            'filesAnalyzed': r[FILES_ANALYZED] == "true"
-        }
-        if PACKAGE_VERSION in r:
-          package['versionInfo'] = r[PACKAGE_VERSION]
-        if PACKAGE_SUPPLIER in r:
-          package['supplier'] = r[PACKAGE_SUPPLIER]
-        if PACKAGE_VERIFICATION_CODE in r:
-          package['packageVerificationCode'] = {
-              'packageVerificationCodeValue': r[PACKAGE_VERIFICATION_CODE]
-          }
-        if PACKAGE_EXTERNAL_REF in r:
-          package['externalRefs'] = []
-          for ref in r[PACKAGE_EXTERNAL_REF]:
-            # ref is 'ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4'
-            fields = ref.split(' ')
-            ext_ref = {
-                'referenceCategory': fields[1],
-                'referenceType': fields[2],
-                'referenceLocator': fields[3],
-            }
-            package['externalRefs'].append(ext_ref)
-
-        doc['packages'].append(package)
-        if r[SPDXID] == product_package_id:
-          product_package = package
-          product_package['hasFiles'] = []
-
-      elif FILE_NAME in r:  # files
-        file = {
-            'fileName': r[FILE_NAME],
-            SPDXID: r[SPDXID]
-        }
-        checksum = r[FILE_CHECKSUM].split(': ')
-        file['checksums'] = [{
-            'algorithm': checksum[0],
-            'checksumValue': checksum[1],
-        }]
-        doc['files'].append(file)
-        product_package['hasFiles'].append(r[SPDXID])
-
-    elif r.__class__.__name__ == 'str':
-      if r.startswith(RELATIONSHIP):
-        # r is 'Relationship: <spdxid> <relationship> <spdxid>'
-        fields = r.split(' ')
-        rel = {
-            'spdxElementId': fields[1],
-            'relatedSpdxElement': fields[3],
-            'relationshipType': fields[2],
-        }
-        if fields[2] == REL_DESCRIBES:
-          doc['documentDescribes'].append(fields[3])
-        else:
-          doc['relationships'].append(rel)
-
-  with open(args.output_file + '.json', 'w', encoding="utf-8") as output_file:
-    output_file.write(json.dumps(doc, indent=4))
-
-
-def save_report(report):
-  prefix, _ = os.path.splitext(args.output_file)
-  with open(prefix + '-gen-report.txt', 'w', encoding='utf-8') as report_file:
-    for type, issues in report.items():
-      report_file.write(type + '\n')
-      for issue in issues:
-        report_file.write('\t' + issue + '\n')
-      report_file.write('\n')
-
-
-def sort_rels(rel):
-  # rel = 'Relationship file_id GENERATED_FROM package_id'
-  fields = rel.split(' ')
-  return fields[3] + fields[1]
-
-
-# Validate the metadata generated by Make for installed files and report if there is no metadata.
-def installed_file_has_metadata(installed_file_metadata, report):
-  installed_file = installed_file_metadata['installed_file']
-  module_path = installed_file_metadata['module_path']
-  product_copy_files = installed_file_metadata['product_copy_files']
-  kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
-  is_platform_generated = installed_file_metadata['is_platform_generated']
-
-  if (not module_path and
-      not product_copy_files and
-      not kernel_module_copy_files and
-      not is_platform_generated and
-      not installed_file.endswith('.fsv_meta')):
-    report[ISSUE_NO_METADATA].append(installed_file)
-    return False
-
-  return True
-
-
-def report_metadata_file(metadata_file_path, installed_file_metadata, report):
-  if metadata_file_path:
-    report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
-        'installed_file: {}, module_path: {}, METADATA file: {}'.format(
-            installed_file_metadata['installed_file'],
-            installed_file_metadata['module_path'],
-            metadata_file_path + '/METADATA'))
-
-    package_metadata = metadata_file_pb2.Metadata()
-    with open(metadata_file_path + '/METADATA', 'rt') as f:
-      text_format.Parse(f.read(), package_metadata)
-
-    if not metadata_file_path in metadata_file_protos:
-      metadata_file_protos[metadata_file_path] = package_metadata
-      if not package_metadata.name:
-        report[ISSUE_METADATA_FILE_INCOMPLETE].append(f'{metadata_file_path}/METADATA does not has "name"')
-
-      if not package_metadata.third_party.version:
-        report[ISSUE_METADATA_FILE_INCOMPLETE].append(
-            f'{metadata_file_path}/METADATA does not has "third_party.version"')
-
-      for tag in package_metadata.third_party.security.tag:
-        if not tag.startswith(NVD_CPE23):
-          report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
-              f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
-  else:
-    report[ISSUE_NO_METADATA_FILE].append(
-        "installed_file: {}, module_path: {}".format(
-            installed_file_metadata['installed_file'], installed_file_metadata['module_path']))
-
-
-def generate_fragment():
-  with open(args.metadata, newline='') as sbom_metadata_file:
-    reader = csv.DictReader(sbom_metadata_file)
-    for installed_file_metadata in reader:
-      installed_file = installed_file_metadata['installed_file']
-      if args.output_file != args.product_out_dir + installed_file + ".spdx":
-        continue
-
-      module_path = installed_file_metadata['module_path']
-      package_id = new_package_id(encode_for_spdxid(module_path), PKG_PREBUILT)
-      package = new_package_record(package_id, module_path, args.build_version, args.product_mfr)
-      file_id = new_file_id(installed_file)
-      file = new_file_record(file_id, installed_file, checksum(installed_file))
-      relationship = new_relationship_record(file_id, REL_GENERATED_FROM, package_id)
-      records = [package, file, relationship]
-      write_tagvalue_sbom(records)
-      break
-
-
-def main():
-  global args
-  args = get_args()
-  log('Args:', vars(args))
-
-  if args.unbundled:
-    generate_fragment()
-    return
-
-  global metadata_file_protos
-  metadata_file_protos = {}
-
-  doc_id = 'SPDXRef-DOCUMENT'
-  doc_header = new_doc_header(doc_id)
-
-  product_package_id = 'SPDXRef-PRODUCT'
-  product_package = new_package_record(product_package_id, 'PRODUCT', args.build_version, args.product_mfr,
-                                       files_analyzed='true')
-
-  platform_package_id = 'SPDXRef-PLATFORM'
-  platform_package = new_package_record(platform_package_id, 'PLATFORM', args.build_version, args.product_mfr)
-
-  # Report on some issues and information
-  report = {
-      ISSUE_NO_METADATA: [],
-      ISSUE_NO_METADATA_FILE: [],
-      ISSUE_METADATA_FILE_INCOMPLETE: [],
-      ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
-      INFO_METADATA_FOUND_FOR_PACKAGE: []
-  }
-
-  # Scan the metadata in CSV file and create the corresponding package and file records in SPDX
-  product_files = []
-  package_ids = []
-  package_records = []
-  rels_file_gen_from = []
-  with open(args.metadata, newline='') as sbom_metadata_file:
-    reader = csv.DictReader(sbom_metadata_file)
-    for installed_file_metadata in reader:
-      installed_file = installed_file_metadata['installed_file']
-      module_path = installed_file_metadata['module_path']
-      product_copy_files = installed_file_metadata['product_copy_files']
-      kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
-
-      if not installed_file_has_metadata(installed_file_metadata, report):
-        continue
-
-      file_id = new_file_id(installed_file)
-      product_files.append(new_file_record(file_id, installed_file, checksum(installed_file)))
-
-      if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
-        metadata_file_path = get_metadata_file_path(installed_file_metadata)
-        report_metadata_file(metadata_file_path, installed_file_metadata, report)
-
-        # File from source fork packages or prebuilt fork packages
-        external_doc_ref, pkgs, rels = get_sbom_fragments(installed_file_metadata, metadata_file_path)
-        if len(pkgs) > 0:
-          if external_doc_ref and external_doc_ref not in doc_header[EXTERNAL_DOCUMENT_REF]:
-            doc_header[EXTERNAL_DOCUMENT_REF].append(external_doc_ref)
-          for p in pkgs:
-            if not p[SPDXID] in package_ids:
-              package_ids.append(p[SPDXID])
-              package_records.append(p)
-          for rel in rels:
-            if not rel in package_records:
-              package_records.append(rel)
-          fork_package_id = pkgs[0][SPDXID]  # The first package should be the source/prebuilt fork package
-          rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, fork_package_id))
-      elif module_path or installed_file_metadata['is_platform_generated']:
-        # File from PLATFORM package
-        rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
-      elif product_copy_files:
-        # Format of product_copy_files: <source path>:<dest path>
-        src_path = product_copy_files.split(':')[0]
-        # So far product_copy_files are copied from directory system, kernel, hardware, frameworks and device,
-        # so process them as files from PLATFORM package
-        rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
-      elif installed_file.endswith('.fsv_meta'):
-        # See build/make/core/Makefile:2988
-        rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
-      elif kernel_module_copy_files.startswith('ANDROID-GEN'):
-        # For the four files generated for _dlkm, _ramdisk partitions
-        # See build/make/core/Makefile:323
-        rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
-
-  product_package[PACKAGE_VERIFICATION_CODE] = generate_package_verification_code(product_files)
-
-  all_records = [
-      doc_header,
-      product_package,
-      new_relationship_record(doc_id, REL_DESCRIBES, product_package_id),
-  ]
-  all_records += product_files
-  all_records.append(platform_package)
-  all_records += package_records
-  rels_file_gen_from.sort(key=sort_rels)
-  all_records += rels_file_gen_from
-
-  # Save SBOM records to output file
-  doc_header[CREATED] = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
-  write_tagvalue_sbom(all_records)
-  if args.json:
-    write_json_sbom(all_records, product_package_id)
-
-  save_report(report)
-
-
-if __name__ == '__main__':
-  main()
diff --git a/tools/post_process_props.py b/tools/post_process_props.py
index 38d17a8..31a460d 100755
--- a/tools/post_process_props.py
+++ b/tools/post_process_props.py
@@ -43,7 +43,7 @@
   """Validate GRF properties if exist.
 
   If ro.board.first_api_level is defined, check if its value is valid for the
-  sdk version.
+  sdk version. This check is enforced only on release (REL) builds.
   Also, validate the value of ro.board.api_level if defined.
 
   Returns:
@@ -51,6 +51,7 @@
   """
   grf_api_level = prop_list.get_value("ro.board.first_api_level")
   board_api_level = prop_list.get_value("ro.board.api_level")
+  platform_version_codename = prop_list.get_value("ro.build.version.codename")
 
   if not grf_api_level:
     if board_api_level:
@@ -61,6 +62,18 @@
     return True
 
   grf_api_level = int(grf_api_level)
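+  # If ro.board.api_level is set, it must be at least ro.board.first_api_level.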
+  if board_api_level:
+    board_api_level = int(board_api_level)
+    if board_api_level < grf_api_level:
+      sys.stderr.write("error: ro.board.api_level(%d) must be greater than "
+                       "ro.board.first_api_level(%d)\n"
+                       % (board_api_level, grf_api_level))
+      return False
+
+  # skip sdk version validation for dev-stage non-REL devices
+  if platform_version_codename != "REL":
+    return True
+
   if grf_api_level > sdk_version:
     sys.stderr.write("error: ro.board.first_api_level(%d) must be less than "
                      "or equal to ro.build.version.sdk(%d)\n"
@@ -68,12 +81,10 @@
     return False
 
   if board_api_level:
-    board_api_level = int(board_api_level)
-    if board_api_level < grf_api_level or board_api_level > sdk_version:
-      sys.stderr.write("error: ro.board.api_level(%d) must be neither less "
-                       "than ro.board.first_api_level(%d) nor greater than "
-                       "ro.build.version.sdk(%d)\n"
-                       % (board_api_level, grf_api_level, sdk_version))
+    if board_api_level > sdk_version:
+      sys.stderr.write("error: ro.board.api_level(%d) must be less than or "
+                       "equal to ro.build.version.sdk(%d)\n"
+                       % (board_api_level, sdk_version))
       return False
 
   return True
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 40f7c92..d523701 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -626,7 +626,7 @@
     if os.path.isfile(deapexer_path):
       deapexer = deapexer_path
 
-  for apex_filename in os.listdir(target_dir):
+  for apex_filename in sorted(os.listdir(target_dir)):
     apex_filepath = os.path.join(target_dir, apex_filename)
     if not os.path.isfile(apex_filepath) or \
             not zipfile.is_zipfile(apex_filepath):
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 9bbdc51..3904a78 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -4001,3 +4001,26 @@
     # Magic for android sparse image format
     # https://source.android.com/devices/bootloader/images
     return fp.read(4) == b'\x3A\xFF\x26\xED'
+
+def ParseUpdateEngineConfig(path: str):
+  """Parse the update_engine config stored in file `path`
+  Args
+    path: Path to update_engine_config.txt file in target_files
+
+  Returns
+    A tuple of (major, minor) version numbers, e.g. (2, 8).
+  """
+  with open(path, "r") as fp:
+    # update_engine_config.txt is only supposed to contain two lines,
+    # PAYLOAD_MAJOR_VERSION and PAYLOAD_MINOR_VERSION. 1024 should be more than
+    # sufficient. If the length is more than that, something is wrong.
+    data = fp.read(1024)
+    major = re.search(r"PAYLOAD_MAJOR_VERSION=(\d+)", data)
+    if not major:
+      raise ValueError(
+          f"{path} is an invalid update_engine config, missing PAYLOAD_MAJOR_VERSION {data}")
+    minor = re.search(r"PAYLOAD_MINOR_VERSION=(\d+)", data)
+    if not minor:
+      raise ValueError(
+          f"{path} is an invalid update_engine config, missing PAYLOAD_MINOR_VERSION {data}")
+    return (int(major.group(1)), int(minor.group(1)))
\ No newline at end of file
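
A quick self-contained check of the parser's contract (file contents illustrative):

  import tempfile

  with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as f:
    f.write("PAYLOAD_MAJOR_VERSION=2\nPAYLOAD_MINOR_VERSION=8\n")
  assert ParseUpdateEngineConfig(f.name) == (2, 8)
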
diff --git a/tools/releasetools/merge/merge_meta.py b/tools/releasetools/merge/merge_meta.py
index 3288ef7..b61f039 100644
--- a/tools/releasetools/merge/merge_meta.py
+++ b/tools/releasetools/merge/merge_meta.py
@@ -29,6 +29,7 @@
 import merge_utils
 import sparse_img
 import verity_utils
+from ota_utils import ParseUpdateEngineConfig
 
 from common import ExternalError
 
@@ -52,28 +53,6 @@
 MODULE_KEY_PATTERN = re.compile(r'name="(.+)\.(apex|apk)"')
 
 
-def ParseUpdateEngineConfig(path: str):
-  """Parse the update_engine config stored in file `path`
-  Args
-    path: Path to update_engine_config.txt file in target_files
-
-  Returns
-    A tuple of (major, minor) version number . E.g. (2, 8)
-  """
-  with open(path, "r") as fp:
-    # update_engine_config.txt is only supposed to contain two lines,
-    # PAYLOAD_MAJOR_VERSION and PAYLOAD_MINOR_VERSION. 1024 should be more than
-    # sufficient. If the length is more than that, something is wrong.
-    data = fp.read(1024)
-    major = re.search(r"PAYLOAD_MAJOR_VERSION=(\d+)", data)
-    if not major:
-      raise ValueError(
-          f"{path} is an invalid update_engine config, missing PAYLOAD_MAJOR_VERSION {data}")
-    minor = re.search(r"PAYLOAD_MINOR_VERSION=(\d+)", data)
-    if not minor:
-      raise ValueError(
-          f"{path} is an invalid update_engine config, missing PAYLOAD_MINOR_VERSION {data}")
-    return (int(major.group(1)), int(minor.group(1)))
 
 
 def MergeUpdateEngineConfig(input_metadir1, input_metadir2, merged_meta_dir):
@@ -99,16 +78,16 @@
   """Merges various files in META/*."""
 
   framework_meta_dir = os.path.join(temp_dir, 'framework_meta', 'META')
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.framework_target_files,
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.framework_target_files,
       output_dir=os.path.dirname(framework_meta_dir),
-      extract_item_list=('META/*',))
+      item_list=('META/*',))
 
   vendor_meta_dir = os.path.join(temp_dir, 'vendor_meta', 'META')
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.vendor_target_files,
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.vendor_target_files,
       output_dir=os.path.dirname(vendor_meta_dir),
-      extract_item_list=('META/*',))
+      item_list=('META/*',))
 
   merged_meta_dir = os.path.join(merged_dir, 'META')
 
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index 54122b0..8f93688 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -26,9 +26,9 @@
 
 Usage: merge_target_files [args]
 
-  --framework-target-files framework-target-files-zip-archive
+  --framework-target-files framework-target-files-package
       The input target files package containing framework bits. This is a zip
-      archive.
+      archive or a directory.
 
   --framework-item-list framework-item-list-file
       The optional path to a newline-separated config file of items that
@@ -38,9 +38,9 @@
       The optional path to a newline-separated config file of keys to
       extract from the framework META/misc_info.txt file.
 
-  --vendor-target-files vendor-target-files-zip-archive
+  --vendor-target-files vendor-target-files-package
       The input target files package containing vendor bits. This is a zip
-      archive.
+      archive or a directory.
 
   --vendor-item-list vendor-item-list-file
       The optional path to a newline-separated config file of items that
@@ -172,18 +172,18 @@
     Path to merged package under temp directory.
   """
   # Extract "as is" items from the input framework and vendor partial target
-  # files packages directly into the output temporary directory, since these items
-  # do not need special case processing.
+  # files packages directly into the output temporary directory, since these
+  # items do not need special case processing.
 
   output_target_files_temp_dir = os.path.join(temp_dir, 'output')
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.framework_target_files,
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.framework_target_files,
       output_dir=output_target_files_temp_dir,
-      extract_item_list=OPTIONS.framework_item_list)
-  merge_utils.ExtractItems(
-      input_zip=OPTIONS.vendor_target_files,
+      item_list=OPTIONS.framework_item_list)
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.vendor_target_files,
       output_dir=output_target_files_temp_dir,
-      extract_item_list=OPTIONS.vendor_item_list)
+      item_list=OPTIONS.vendor_item_list)
 
   # Perform special case processing on META/* items.
   # After this function completes successfully, all the files we need to create
@@ -231,7 +231,8 @@
   def copy_selinux_file(input_path, output_filename):
     input_filename = os.path.join(target_files_dir, input_path)
     if not os.path.exists(input_filename):
-      input_filename = input_filename.replace('SYSTEM_EXT/', 'SYSTEM/system_ext/') \
+      input_filename = input_filename.replace('SYSTEM_EXT/',
+                                              'SYSTEM/system_ext/') \
           .replace('PRODUCT/', 'SYSTEM/product/')
       if not os.path.exists(input_filename):
         logger.info('Skipping copy_selinux_file for %s', input_filename)
@@ -272,7 +273,10 @@
   vendor_target_files_dir = common.MakeTempDir(
       prefix='merge_target_files_vendor_target_files_')
   common.UnzipToDir(OPTIONS.vendor_otatools, vendor_otatools_dir)
-  common.UnzipToDir(OPTIONS.vendor_target_files, vendor_target_files_dir)
+  merge_utils.CollectTargetFiles(
+      input_zipfile_or_dir=OPTIONS.vendor_target_files,
+      output_dir=vendor_target_files_dir,
+      item_list=OPTIONS.vendor_item_list)
 
   # Copy the partition contents from the merged target-files archive to the
   # vendor target-files archive.
@@ -303,8 +307,9 @@
   shutil.move(
       os.path.join(vendor_target_files_dir, 'IMAGES', partition_img),
       os.path.join(target_files_dir, 'IMAGES', partition_img))
-  move_only_exists(os.path.join(vendor_target_files_dir, 'IMAGES', partition_map),
-        os.path.join(target_files_dir, 'IMAGES', partition_map))
+  move_only_exists(
+      os.path.join(vendor_target_files_dir, 'IMAGES', partition_map),
+      os.path.join(target_files_dir, 'IMAGES', partition_map))
 
   def copy_recovery_file(filename):
     for subdir in ('VENDOR', 'SYSTEM/vendor'):
@@ -578,10 +583,10 @@
     common.Usage(__doc__)
     sys.exit(1)
 
-  with zipfile.ZipFile(OPTIONS.framework_target_files, allowZip64=True) as fz:
-    framework_namelist = fz.namelist()
-  with zipfile.ZipFile(OPTIONS.vendor_target_files, allowZip64=True) as vz:
-    vendor_namelist = vz.namelist()
+  framework_namelist = merge_utils.GetTargetFilesItems(
+      OPTIONS.framework_target_files)
+  vendor_namelist = merge_utils.GetTargetFilesItems(
+      OPTIONS.vendor_target_files)
 
   if OPTIONS.framework_item_list:
     OPTIONS.framework_item_list = common.LoadListFromFile(
diff --git a/tools/releasetools/merge/merge_utils.py b/tools/releasetools/merge/merge_utils.py
index e056195..c284338 100644
--- a/tools/releasetools/merge/merge_utils.py
+++ b/tools/releasetools/merge/merge_utils.py
@@ -49,28 +49,80 @@
   common.UnzipToDir(input_zip, output_dir, filtered_extract_item_list)
 
 
-def CopyItems(from_dir, to_dir, patterns):
-  """Similar to ExtractItems() except uses an input dir instead of zip."""
-  file_paths = []
-  for dirpath, _, filenames in os.walk(from_dir):
-    file_paths.extend(
-        os.path.relpath(path=os.path.join(dirpath, filename), start=from_dir)
-        for filename in filenames)
+def CopyItems(from_dir, to_dir, copy_item_list):
+  """Copies the items in copy_item_list from source to destination directory.
 
-  filtered_file_paths = set()
-  for pattern in patterns:
-    filtered_file_paths.update(fnmatch.filter(file_paths, pattern))
+  copy_item_list may include files and directories. Will copy the matched
+  files and create the matched directories.
 
-  for file_path in filtered_file_paths:
-    original_file_path = os.path.join(from_dir, file_path)
-    copied_file_path = os.path.join(to_dir, file_path)
-    copied_file_dir = os.path.dirname(copied_file_path)
-    if not os.path.exists(copied_file_dir):
-      os.makedirs(copied_file_dir)
-    if os.path.islink(original_file_path):
-      os.symlink(os.readlink(original_file_path), copied_file_path)
+  Args:
+    from_dir: The source directory.
+    to_dir: The destination directory.
+    copy_item_list: Items to be copied.
+  """
+  item_paths = []
+  for root, dirs, files in os.walk(from_dir):
+    item_paths.extend(
+        os.path.relpath(path=os.path.join(root, item_name), start=from_dir)
+        for item_name in files + dirs)
+
+  filtered = set()
+  for pattern in copy_item_list:
+    filtered.update(fnmatch.filter(item_paths, pattern))
+
+  for item in filtered:
+    original_path = os.path.join(from_dir, item)
+    copied_path = os.path.join(to_dir, item)
+    copied_parent_path = os.path.dirname(copied_path)
+    if not os.path.exists(copied_parent_path):
+      os.makedirs(copied_parent_path)
+    if os.path.islink(original_path):
+      os.symlink(os.readlink(original_path), copied_path)
+    elif os.path.isdir(original_path):
+      if not os.path.exists(copied_path):
+        os.makedirs(copied_path)
     else:
-      shutil.copyfile(original_file_path, copied_file_path)
+      shutil.copyfile(original_path, copied_path)
+
+
+def GetTargetFilesItems(target_files_zipfile_or_dir):
+  """Gets a list of target files items."""
+  if zipfile.is_zipfile(target_files_zipfile_or_dir):
+    with zipfile.ZipFile(target_files_zipfile_or_dir, allowZip64=True) as fz:
+      return fz.namelist()
+  elif os.path.isdir(target_files_zipfile_or_dir):
+    item_list = []
+    for root, dirs, files in os.walk(target_files_zipfile_or_dir):
+      item_list.extend(
+          os.path.relpath(path=os.path.join(root, item),
+                          start=target_files_zipfile_or_dir)
+          for item in dirs + files)
+    return item_list
+  else:
+    raise ValueError('Target files must be either a zipfile or a directory.')
+
+
+def CollectTargetFiles(input_zipfile_or_dir, output_dir, item_list=None):
+  """Extracts input zipfile or copy input directory to output directory.
+
+  Extracts the input zipfile if `input_zipfile_or_dir` is a zip archive, or
+  copies the items if `input_zipfile_or_dir` is a directory.
+
+  Args:
+    input_zipfile_or_dir: The input target files, could be either a zipfile to
+      extract or a directory to copy.
+    output_dir: The output directory to which the input files are extracted
+      or copied.
+    item_list: Files to be extracted or copied. Will extract or copy all files
+      if omitted.
+  """
+  patterns = item_list if item_list else ('*',)
+  if zipfile.is_zipfile(input_zipfile_or_dir):
+    ExtractItems(input_zipfile_or_dir, output_dir, patterns)
+  elif os.path.isdir(input_zipfile_or_dir):
+    CopyItems(input_zipfile_or_dir, output_dir, patterns)
+  else:
+    raise ValueError('Target files must be either a zipfile or a directory.')
 
 
 def WriteSortedData(data, path):
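
With this helper in place, callers no longer need to care which form the input takes. A usage sketch with illustrative paths:

  # Zip archive input: matching items are unzipped into output_dir.
  merge_utils.CollectTargetFiles(
      input_zipfile_or_dir='framework-target_files.zip',
      output_dir='/tmp/framework',
      item_list=('META/*',))

  # Directory input: the same patterns are applied while copying;
  # omitting item_list extracts or copies everything ('*').
  merge_utils.CollectTargetFiles(
      input_zipfile_or_dir='/path/to/extracted_target_files',
      output_dir='/tmp/vendor')
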
diff --git a/tools/releasetools/merge/test_merge_utils.py b/tools/releasetools/merge/test_merge_utils.py
index 1ae1f54..b4c47ae 100644
--- a/tools/releasetools/merge/test_merge_utils.py
+++ b/tools/releasetools/merge/test_merge_utils.py
@@ -35,22 +35,27 @@
       open(path, 'a').close()
       return path
 
+    def createEmptyFolder(path):
+      os.makedirs(path)
+      return path
+
     def createSymLink(source, dest):
       os.symlink(source, dest)
       return dest
 
     def getRelPaths(start, filepaths):
       return set(
-          os.path.relpath(path=filepath, start=start) for filepath in filepaths)
+          os.path.relpath(path=filepath, start=start)
+          for filepath in filepaths)
 
     input_dir = common.MakeTempDir()
     output_dir = common.MakeTempDir()
     expected_copied_items = []
     actual_copied_items = []
-    patterns = ['*.cpp', 'subdir/*.txt']
+    patterns = ['*.cpp', 'subdir/*.txt', 'subdir/empty_dir']
 
-    # Create various files that we expect to get copied because they
-    # match one of the patterns.
+    # Create various files and empty directories that we expect to get copied
+    # because they match one of the patterns.
     expected_copied_items.extend([
         createEmptyFile(os.path.join(input_dir, 'a.cpp')),
         createEmptyFile(os.path.join(input_dir, 'b.cpp')),
@@ -58,6 +63,7 @@
         createEmptyFile(os.path.join(input_dir, 'subdir', 'd.txt')),
         createEmptyFile(
             os.path.join(input_dir, 'subdir', 'subsubdir', 'e.txt')),
+        createEmptyFolder(os.path.join(input_dir, 'subdir', 'empty_dir')),
         createSymLink('a.cpp', os.path.join(input_dir, 'a_link.cpp')),
     ])
     # Create some more files that we expect to not get copied.
@@ -70,9 +76,13 @@
     merge_utils.CopyItems(input_dir, output_dir, patterns)
 
     # Assert the actual copied items match the ones we expected.
-    for dirpath, _, filenames in os.walk(output_dir):
+    for root_dir, dirs, files in os.walk(output_dir):
       actual_copied_items.extend(
-          os.path.join(dirpath, filename) for filename in filenames)
+          os.path.join(root_dir, filename) for filename in files)
+      for dirname in dirs:
+        dir_path = os.path.join(root_dir, dirname)
+        if not os.listdir(dir_path):
+          actual_copied_items.append(dir_path)
     self.assertEqual(
         getRelPaths(output_dir, actual_copied_items),
         getRelPaths(input_dir, expected_copied_items))
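
The empty-directory accounting added to this assertion reduces to the following os.walk pattern (names illustrative):

  import os

  def collect_copied_items(root):
    items = []
    for dirpath, dirs, files in os.walk(root):
      items.extend(os.path.join(dirpath, f) for f in files)
      # Record only directories that ended up empty; non-empty ones are
      # already implied by the files copied into them.
      items.extend(os.path.join(dirpath, d) for d in dirs
                   if not os.listdir(os.path.join(dirpath, d)))
    return items
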
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 043f6ee..2458244 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -423,6 +423,13 @@
   slot will be used. This is to ensure that we always have valid boot, vbmeta,
   bootloader images in the inactive slot.
 
+  After writing system_other to the inactive slot's system partition,
+  PackageManagerService will read `ro.cp_system_other_odex`, and set
+  `sys.cppreopt` to "requested". Then, according to
+  system/extras/cppreopts/cppreopts.rc, init will mount system_other at
+  /postinstall, and execute `cppreopts` to copy optimized APKs from
+  /postinstall to /data.
+
   Args:
     input_file: The input target-files.zip file.
     skip_postinstall: Whether to skip copying the postinstall config file.
@@ -899,7 +906,8 @@
     OPTIONS.enable_vabc_xor = False
 
   if OPTIONS.vabc_compression_param == "none":
-    logger.info("VABC Compression algorithm is set to 'none', disabling VABC xor")
+    logger.info(
+        "VABC Compression algorithm is set to 'none', disabling VABC xor")
     OPTIONS.enable_vabc_xor = False
   additional_args = []
 
@@ -915,7 +923,6 @@
   elif OPTIONS.partial:
     target_file = GetTargetFilesZipForPartialUpdates(target_file,
                                                      OPTIONS.partial)
-    additional_args += ["--is_partial_update", "true"]
   elif OPTIONS.vabc_compression_param:
     target_file = GetTargetFilesZipForCustomVABCCompression(
         target_file, OPTIONS.vabc_compression_param)
@@ -931,7 +938,8 @@
   # Metadata to comply with Android OTA package format.
   metadata = GetPackageMetadata(target_info, source_info)
   # Generate payload.
-  payload = PayloadGenerator(wipe_user_data=OPTIONS.wipe_user_data)
+  payload = PayloadGenerator(
+      wipe_user_data=OPTIONS.wipe_user_data,
+      minor_version=OPTIONS.force_minor_version,
+      is_partial_update=OPTIONS.partial)
 
   partition_timestamps_flags = []
   # Enforce a max timestamp this payload can be applied on top of.
@@ -958,7 +966,7 @@
 
   additional_args += ["--security_patch_level", security_patch_level]
 
-  additional_args += ["--enable_zucchini",
+  additional_args += ["--enable_zucchini=" +
                       str(OPTIONS.enable_zucchini).lower()]
 
   if not ota_utils.IsLz4diffCompatible(source_file, target_file):
@@ -966,7 +974,7 @@
         "Source build doesn't support lz4diff, or source/target don't have compatible lz4diff versions. Disabling lz4diff.")
     OPTIONS.enable_lz4diff = False
 
-  additional_args += ["--enable_lz4diff",
+  additional_args += ["--enable_lz4diff=" +
                       str(OPTIONS.enable_lz4diff).lower()]
 
   if source_file and OPTIONS.enable_lz4diff:
@@ -982,20 +990,13 @@
     additional_args += ["--erofs_compression_param", erofs_compression_param]
 
   if OPTIONS.disable_vabc:
-    additional_args += ["--disable_vabc", "true"]
+    additional_args += ["--disable_vabc=true"]
   if OPTIONS.enable_vabc_xor:
-    additional_args += ["--enable_vabc_xor", "true"]
-  if OPTIONS.force_minor_version:
-    additional_args += ["--force_minor_version", OPTIONS.force_minor_version]
+    additional_args += ["--enable_vabc_xor=true"]
   if OPTIONS.compressor_types:
     additional_args += ["--compressor_types", OPTIONS.compressor_types]
   additional_args += ["--max_timestamp", max_timestamp]
 
-  if SupportsMainlineGkiUpdates(source_file):
-    logger.warning(
-        "Detected build with mainline GKI, include full boot image.")
-    additional_args.extend(["--full_boot", "true"])
-
   payload.Generate(
       target_file,
       source_file,
@@ -1356,7 +1357,8 @@
           "what(even if data wipe is done), so SPL downgrade on any "
           "release-keys build is not allowed.".format(target_spl, source_spl))
 
-    logger.info("SPL downgrade on %s", target_build_prop.GetProp("ro.build.tags"))
+    logger.info("SPL downgrade on %s",
+                target_build_prop.GetProp("ro.build.tags"))
     if is_spl_downgrade and not OPTIONS.spl_downgrade and not OPTIONS.downgrade:
       raise common.ExternalError(
           "Target security patch level {} is older than source SPL {} applying "
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index e2ce31d..8c26114 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -25,7 +25,7 @@
 from common import (ZipDelete, OPTIONS, MakeTempFile,
                     ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
                     SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
-                    GetRamdiskFormat)
+                    GetRamdiskFormat, ParseUpdateEngineConfig)
 from payload_signer import PayloadSigner
 
 
@@ -135,7 +135,8 @@
     logger.info(f"Signing disabled for output file {output_file}")
     shutil.copy(prelim_signing, output_file)
   else:
-    logger.info(f"Signing the output file {output_file} with key {package_key}")
+    logger.info(
+        f"Signing the output file {output_file} with key {package_key}")
     SignOutput(prelim_signing, output_file, package_key, pw)
 
   # Reopen the final signed zip to double check the streaming metadata.
@@ -721,6 +722,45 @@
   return sourceEntry and targetEntry and sourceEntry == targetEntry
 
 
+def ExtractTargetFiles(path: str):
+  """Returns a dir of extracted target files, unzipping `path` if needed."""
+  if os.path.isdir(path):
+    logger.info("target files %s is already extracted", path)
+    return path
+  extracted_dir = common.MakeTempDir("target_files")
+  common.UnzipToDir(path, extracted_dir, UNZIP_PATTERN)
+  return extracted_dir
+
+
+def LocatePartitionPath(target_files_dir: str, partition: str, allow_empty):
+  path = os.path.join(target_files_dir, "RADIO", partition + ".img")
+  if os.path.exists(path):
+    return path
+  path = os.path.join(target_files_dir, "IMAGES", partition + ".img")
+  if os.path.exists(path):
+    return path
+  if allow_empty:
+    return ""
+  raise common.ExternalError(
+      "Partition {} not found in target files {}".format(partition, target_files_dir))
+
+
+def GetPartitionImages(target_files_dir: str, ab_partitions, allow_empty=True):
+  assert os.path.isdir(target_files_dir)
+  return ":".join([LocatePartitionPath(target_files_dir, partition, allow_empty) for partition in ab_partitions])
+
+
+def LocatePartitionMap(target_files_dir: str, partition: str):
+  path = os.path.join(target_files_dir, "RADIO", partition + ".map")
+  if os.path.exists(path):
+    return path
+  return ""
+
+
+def GetPartitionMaps(target_files_dir: str, ab_partitions):
+  assert os.path.isdir(target_files_dir)
+  return ":".join([LocatePartitionMap(target_files_dir, partition) for partition in ab_partitions])
+
+
 class PayloadGenerator(object):
   """Manages the creation and the signing of an A/B OTA Payload."""
 
@@ -729,7 +769,7 @@
   SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
   SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
 
-  def __init__(self, secondary=False, wipe_user_data=False):
+  def __init__(self, secondary=False, wipe_user_data=False,
+               minor_version=None, is_partial_update=False):
     """Initializes a Payload instance.
 
     Args:
@@ -739,6 +779,8 @@
     self.payload_properties = None
     self.secondary = secondary
     self.wipe_user_data = wipe_user_data
+    self.minor_version = minor_version
+    self.is_partial_update = is_partial_update
 
   def _Run(self, cmd):  # pylint: disable=no-self-use
     # Don't pipe (buffer) the output if verbose is set. Let
@@ -757,21 +799,56 @@
       source_file: The filename of the source build target-files zip; or None if
           generating a full OTA.
       additional_args: A list of additional args that should be passed to
-          brillo_update_payload script; or None.
+          delta_generator binary; or None.
     """
     if additional_args is None:
       additional_args = []
 
     payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
-    cmd = ["brillo_update_payload", "generate",
-           "--payload", payload_file,
-           "--target_image", target_file]
+    target_dir = ExtractTargetFiles(target_file)
+    cmd = ["delta_generator",
+           "--out_file", payload_file]
+    with open(os.path.join(target_dir, "META", "ab_partitions.txt")) as fp:
+      ab_partitions = fp.read().strip().split("\n")
+    cmd.extend(["--partition_names", ":".join(ab_partitions)])
+    cmd.extend(
+        ["--new_partitions", GetPartitionImages(target_dir, ab_partitions, False)])
+    cmd.extend(
+        ["--new_mapfiles", GetPartitionMaps(target_dir, ab_partitions)])
     if source_file is not None:
-      cmd.extend(["--source_image", source_file])
+      source_dir = ExtractTargetFiles(source_file)
+      cmd.extend(
+          ["--old_partitions", GetPartitionImages(source_dir, ab_partitions, True)])
+      cmd.extend(
+          ["--old_mapfiles", GetPartitionMaps(source_dir, ab_partitions)])
+
       if OPTIONS.disable_fec_computation:
-        cmd.extend(["--disable_fec_computation", "true"])
+        cmd.extend(["--disable_fec_computation=true"])
       if OPTIONS.disable_verity_computation:
-        cmd.extend(["--disable_verity_computation", "true"])
+        cmd.extend(["--disable_verity_computation=true"])
+    postinstall_config = os.path.join(
+        target_dir, "META", "postinstall_config.txt")
+
+    if os.path.exists(postinstall_config):
+      cmd.extend(["--new_postinstall_config_file", postinstall_config])
+    dynamic_partition_info = os.path.join(
+        target_dir, "META", "dynamic_partitions_info.txt")
+
+    if os.path.exists(dynamic_partition_info):
+      cmd.extend(["--dynamic_partition_info_file", dynamic_partition_info])
+
+    major_version, minor_version = ParseUpdateEngineConfig(
+        os.path.join(target_dir, "META", "update_engine_config.txt"))
+    # For incremental OTAs, use the source build's update_engine config,
+    # since the device applying the delta runs the source build.
+    if source_file:
+      major_version, minor_version = ParseUpdateEngineConfig(
+          os.path.join(source_dir, "META", "update_engine_config.txt"))
+    if self.minor_version:
+      minor_version = self.minor_version
+    cmd.extend(["--major_version", str(major_version)])
+    if source_file is not None or self.is_partial_update:
+      cmd.extend(["--minor_version", str(minor_version)])
+    if self.is_partial_update:
+      cmd.extend(["--is_partial_update=true"])
     cmd.extend(additional_args)
     self._Run(cmd)
 
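
Put together, a full-OTA invocation assembled by Generate() now has roughly this shape (all paths illustrative):

  cmd = [
      "delta_generator",
      "--out_file", "/tmp/payload-abc123.bin",
      "--partition_names", "boot:system:vendor",
      "--new_partitions",
      "t/IMAGES/boot.img:t/IMAGES/system.img:t/IMAGES/vendor.img",
      "--new_mapfiles", "::t/IMAGES/vendor.map",  # empty entries are allowed
      "--major_version", "2",
      # --minor_version is only added for incremental or partial updates.
  ]
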
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 8c9655ad0..0e4626b 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -452,12 +452,14 @@
         test_file.write(bytes(data))
       test_file.close()
 
-      expected_stat = os.stat(test_file_name)
       expected_mode = extra_zipwrite_args.get("perms", 0o644)
       expected_compress_type = extra_zipwrite_args.get("compress_type",
                                                        zipfile.ZIP_STORED)
-      time.sleep(5)  # Make sure the atime/mtime will change measurably.
 
+      # Arbitrary timestamp, just to make sure common.ZipWrite() restores
+      # the timestamp after writing.
+      os.utime(test_file_name, (1234567, 1234567))
+      expected_stat = os.stat(test_file_name)
       common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
       zip_file.close()
 
@@ -480,8 +482,6 @@
     try:
       expected_compress_type = extra_args.get("compress_type",
                                               zipfile.ZIP_STORED)
-      time.sleep(5)  # Make sure the atime/mtime will change measurably.
-
       if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
         arcname = zinfo_or_arcname
         expected_mode = extra_args.get("perms", 0o644)
@@ -528,11 +528,13 @@
         test_file.write(data)
       test_file.close()
 
+      # Arbitrary timestamp, just to make sure common.ZipWrite() restores
+      # the timestamp after writing.
+      os.utime(test_file_name, (1234567, 1234567))
       expected_stat = os.stat(test_file_name)
       expected_mode = 0o644
       expected_compress_type = extra_args.get("compress_type",
                                               zipfile.ZIP_STORED)
-      time.sleep(5)  # Make sure the atime/mtime will change measurably.
 
       common.ZipWrite(zip_file, test_file_name, **extra_args)
       common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
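
The replacement for the sleeps is the standard trick of pinning a file's timestamps before the call under test. A minimal sketch (file name illustrative):

  import os

  os.utime("some_file", (1234567, 1234567))  # (atime, mtime)
  assert os.stat("some_file").st_mtime == 1234567
  # After common.ZipWrite() runs, comparing os.stat() against the pinned
  # values verifies the timestamps were restored, without any sleep.
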
diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp
new file mode 100644
index 0000000..f6c0190
--- /dev/null
+++ b/tools/sbom/Android.bp
@@ -0,0 +1,53 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+python_binary_host {
+    name: "generate-sbom",
+    srcs: [
+        "generate-sbom.py",
+    ],
+    version: {
+        py3: {
+            embedded_launcher: true,
+        },
+    },
+    libs: [
+        "metadata_file_proto_py",
+        "libprotobuf-python",
+        "sbom_lib",
+    ],
+}
+
+python_library_host {
+    name: "sbom_lib",
+    srcs: [
+        "sbom_data.py",
+        "sbom_writers.py",
+    ],
+}
+
+python_test_host {
+    name: "sbom_writers_test",
+    main: "sbom_writers_test.py",
+    srcs: [
+        "sbom_writers_test.py",
+    ],
+    data: [
+        "testdata/*",
+    ],
+    libs: [
+        "sbom_lib",
+    ],
+    test_suites: ["general-tests"],
+}
diff --git a/tools/sbom/generate-sbom.py b/tools/sbom/generate-sbom.py
new file mode 100755
index 0000000..0c5deb2
--- /dev/null
+++ b/tools/sbom/generate-sbom.py
@@ -0,0 +1,536 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generate the SBOM of the current target product in SPDX format.
+Usage example:
+  generate-sbom.py --output_file out/target/product/vsoc_x86_64/sbom.spdx \
+                   --metadata out/target/product/vsoc_x86_64/sbom-metadata.csv \
+                   --product_out_dir=out/target/product/vsoc_x86_64 \
+                   --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
+                   --product_mfr=Google
+"""
+
+import argparse
+import csv
+import datetime
+import google.protobuf.text_format as text_format
+import hashlib
+import os
+import metadata_file_pb2
+import sbom_data
+import sbom_writers
+
+
+# Package type
+PKG_SOURCE = 'SOURCE'
+PKG_UPSTREAM = 'UPSTREAM'
+PKG_PREBUILT = 'PREBUILT'
+
+# Security tag
+NVD_CPE23 = 'NVD-CPE2.3:'
+
+# Report
+ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
+ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
+ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
+ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
+ISSUE_INSTALLED_FILE_NOT_EXIST = 'Non-existent installed files:'
+INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
+
+
+def get_args():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
+  parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
+  parser.add_argument('--metadata', required=True, help='The SBOM metadata file path.')
+  parser.add_argument('--product_out_dir', required=True, help='The parent directory of all the installed files.')
+  parser.add_argument('--build_version', required=True, help='The build version.')
+  parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
+  parser.add_argument('--json', action='store_true', default=False, help='Generate the SBOM file in SPDX JSON format.')
+  parser.add_argument('--unbundled', action='store_true', default=False, help='Generate the SBOM file for an unbundled module.')
+
+  return parser.parse_args()
+
+
+def log(*info):
+  if args.verbose:
+    for i in info:
+      print(i)
+
+
+def encode_for_spdxid(s):
+  """Simple encode for string values used in SPDXID which uses the charset of A-Za-Z0-9.-"""
+  result = ''
+  for c in s:
+    if c.isalnum() or c in '.-':
+      result += c
+    elif c in '_@/':
+      result += '-'
+    else:
+      result += '0x' + c.encode('utf-8').hex()
+
+  return result.lstrip('-')
+
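+# Illustrative examples (hypothetical inputs, not used elsewhere in this file):
+#   encode_for_spdxid('external/libpng')  -> 'external-libpng'
+#   encode_for_spdxid('libc++_shared.so') -> 'libc0x2b0x2b-shared.so'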
+
+def new_package_id(package_name, package_type):
+  return f'SPDXRef-{package_type}-{encode_for_spdxid(package_name)}'
+
+
+def new_file_id(file_path):
+  return f'SPDXRef-{encode_for_spdxid(file_path)}'
+
+
+def checksum(file_path):
+  file_path = args.product_out_dir + '/' + file_path
+  h = hashlib.sha1()
+  if os.path.islink(file_path):
+    h.update(os.readlink(file_path).encode('utf-8'))
+  else:
+    with open(file_path, 'rb') as f:
+      h.update(f.read())
+  return f'SHA1: {h.hexdigest()}'
+
+
+def is_soong_prebuilt_module(file_metadata):
+  return file_metadata['soong_module_type'] and file_metadata['soong_module_type'] in [
+      'android_app_import', 'android_library_import', 'cc_prebuilt_binary', 'cc_prebuilt_library',
+      'cc_prebuilt_library_headers', 'cc_prebuilt_library_shared', 'cc_prebuilt_library_static', 'cc_prebuilt_object',
+      'dex_import', 'java_import', 'java_sdk_library_import', 'java_system_modules_import',
+      'libclang_rt_prebuilt_library_static', 'libclang_rt_prebuilt_library_shared', 'llvm_prebuilt_library_static',
+      'ndk_prebuilt_object', 'ndk_prebuilt_shared_stl', 'ndk_prebuilt_static_stl', 'prebuilt_apex',
+      'prebuilt_bootclasspath_fragment', 'prebuilt_dsp', 'prebuilt_firmware', 'prebuilt_kernel_modules',
+      'prebuilt_rfsa', 'prebuilt_root', 'rust_prebuilt_dylib', 'rust_prebuilt_library', 'rust_prebuilt_rlib',
+      'vndk_prebuilt_shared',
+
+      # 'android_test_import',
+      # 'cc_prebuilt_test_library_shared',
+      # 'java_import_host',
+      # 'java_test_import',
+      # 'llvm_host_prebuilt_library_shared',
+      # 'prebuilt_apis',
+      # 'prebuilt_build_tool',
+      # 'prebuilt_defaults',
+      # 'prebuilt_etc',
+      # 'prebuilt_etc_host',
+      # 'prebuilt_etc_xml',
+      # 'prebuilt_font',
+      # 'prebuilt_hidl_interfaces',
+      # 'prebuilt_platform_compat_config',
+      # 'prebuilt_stubs_sources',
+      # 'prebuilt_usr_share',
+      # 'prebuilt_usr_share_host',
+      # 'soong_config_module_type_import',
+  ]
+
+
+def is_source_package(file_metadata):
+  module_path = file_metadata['module_path']
+  return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)
+
+
+def is_prebuilt_package(file_metadata):
+  module_path = file_metadata['module_path']
+  if module_path:
+    return (module_path.startswith('prebuilts/') or
+            is_soong_prebuilt_module(file_metadata) or
+            file_metadata['is_prebuilt_make_module'])
+
+  kernel_module_copy_files = file_metadata['kernel_module_copy_files']
+  if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
+    return True
+
+  return False
+
+
+def get_source_package_info(file_metadata, metadata_file_path):
+  """Return source package info exists in its METADATA file, currently including name, security tag
+  and external SBOM reference.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  if not metadata_file_path:
+    return file_metadata['module_path'], []
+
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  external_refs = []
+  for tag in metadata_proto.third_party.security.tag:
+    if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
+      external_refs.append(
+        sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+                                     type=sbom_data.PackageExternalRefType.cpe23Type,
+                                     locator=tag.removeprefix(NVD_CPE23)))
+    elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
+      external_refs.append(
+        sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+                                     type=sbom_data.PackageExternalRefType.cpe22Type,
+                                     locator=tag.removeprefix(NVD_CPE23)))
+
+  if metadata_proto.name:
+    return metadata_proto.name, external_refs
+  else:
+    return os.path.basename(metadata_file_path), external_refs  # return the directory name only as package name
+
+
+def get_prebuilt_package_name(file_metadata, metadata_file_path):
+  """Return name of a prebuilt package, which can be from the METADATA file, metadata file path,
+  module path or kernel module's source path if the installed file is a kernel module.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  name = None
+  if metadata_file_path:
+    metadata_proto = metadata_file_protos[metadata_file_path]
+    if metadata_proto.name:
+      name = metadata_proto.name
+    else:
+      name = metadata_file_path
+  elif file_metadata['module_path']:
+    name = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
+    name = os.path.dirname(src_path)
+
+  return name.removeprefix('prebuilts/').replace('/', '-')
+
+
+def get_metadata_file_path(file_metadata):
+  """Search for METADATA file of a package and return its path."""
+  metadata_path = ''
+  if file_metadata['module_path']:
+    metadata_path = file_metadata['module_path']
+  elif file_metadata['kernel_module_copy_files']:
+    metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])
+
+  while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
+    metadata_path = os.path.dirname(metadata_path)
+
+  return metadata_path
+
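+# E.g. for a hypothetical module_path 'external/foo/bar', the loop tries
+# external/foo/bar/METADATA, then external/foo/METADATA, then
+# external/METADATA, and returns '' if none of them exists.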
+
+def get_package_version(metadata_file_path):
+  """Return a package's version in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  return metadata_proto.third_party.version
+
+
+def get_package_homepage(metadata_file_path):
+  """Return a package's homepage URL in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.homepage:
+    return metadata_proto.third_party.homepage
+  for url in metadata_proto.third_party.url:
+    if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
+      return url.value
+
+  return None
+
+
+def get_package_download_location(metadata_file_path):
+  """Return a package's code repository URL in its METADATA file."""
+  if not metadata_file_path:
+    return None
+  metadata_proto = metadata_file_protos[metadata_file_path]
+  if metadata_proto.third_party.url:
+    urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
+    if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
+      return urls[0].value
+    elif len(urls) > 1:
+      return urls[1].value
+
+  return None
+
+
+def get_sbom_fragments(installed_file_metadata, metadata_file_path):
+  """Return SPDX fragment of source/prebuilt packages, which usually contains a SOURCE/PREBUILT
+  package, a UPSTREAM package if it's a source package and a external SBOM document reference if
+  it's a prebuilt package with sbom_ref defined in its METADATA file.
+
+  See go/android-spdx and go/android-sbom-gen for more details.
+  """
+  external_doc_ref = None
+  packages = []
+  relationships = []
+
+  # Info from METADATA file
+  homepage = get_package_homepage(metadata_file_path)
+  version = get_package_version(metadata_file_path)
+  download_location = get_package_download_location(metadata_file_path)
+
+  if is_source_package(installed_file_metadata):
+    # Source fork packages
+    name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
+    source_package_id = new_package_id(name, PKG_SOURCE)
+    source_package = sbom_data.Package(id=source_package_id, name=name, version=args.build_version,
+                                       supplier='Organization: ' + args.product_mfr,
+                                       external_refs=external_refs)
+
+    upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+    upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
+                                         supplier='Organization: ' + homepage if homepage else None,
+                                         download_location=download_location)
+    packages += [source_package, upstream_package]
+    relationships.append(sbom_data.Relationship(id1=source_package_id,
+                                                relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                id2=upstream_package_id))
+  elif is_prebuilt_package(installed_file_metadata):
+    # Prebuilt fork packages
+    name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
+    prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
+    prebuilt_package = sbom_data.Package(id=prebuilt_package_id,
+                                         name=name,
+                                         version=args.build_version,
+                                         supplier='Organization: ' + args.product_mfr)
+    packages.append(prebuilt_package)
+
+    if metadata_file_path:
+      metadata_proto = metadata_file_protos[metadata_file_path]
+      if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
+        sbom_url = metadata_proto.third_party.sbom_ref.url
+        sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
+        upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
+        if sbom_url and sbom_checksum and upstream_element_id:
+          doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{encode_for_spdxid(name)}'
+          external_doc_ref = sbom_data.DocumentExternalReference(id=doc_ref_id,
+                                                                 uri=sbom_url,
+                                                                 checksum=sbom_checksum)
+          relationships.append(
+            sbom_data.Relationship(id1=prebuilt_package_id,
+                                   relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                   id2=doc_ref_id + ':' + upstream_element_id))
+
+  return external_doc_ref, packages, relationships
+
+
+def generate_package_verification_code(files):
+  checksums = [file.checksum for file in files]
+  checksums.sort()
+  h = hashlib.sha1()
+  h.update(''.join(checksums).encode(encoding='utf-8'))
+  return h.hexdigest()
+
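+# This matches the SPDX package verification code scheme: a SHA1 over the
+# lexicographically sorted concatenation of the file checksums, so the result
+# is independent of the order in which files were added to the document.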
+
+def save_report(report):
+  prefix, _ = os.path.splitext(args.output_file)
+  with open(prefix + '-gen-report.txt', 'w', encoding='utf-8') as report_file:
+    for issue_type, issues in report.items():
+      report_file.write(issue_type + '\n')
+      for issue in issues:
+        report_file.write('\t' + issue + '\n')
+      report_file.write('\n')
+
+
+# Validate the metadata generated by Make for installed files and report if there is no metadata.
+def installed_file_has_metadata(installed_file_metadata, report):
+  installed_file = installed_file_metadata['installed_file']
+  module_path = installed_file_metadata['module_path']
+  product_copy_files = installed_file_metadata['product_copy_files']
+  kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+  is_platform_generated = installed_file_metadata['is_platform_generated']
+
+  if (not module_path and
+      not product_copy_files and
+      not kernel_module_copy_files and
+      not is_platform_generated and
+      not installed_file.endswith('.fsv_meta')):
+    report[ISSUE_NO_METADATA].append(installed_file)
+    return False
+
+  return True
+
+
+def report_metadata_file(metadata_file_path, installed_file_metadata, report):
+  if metadata_file_path:
+    report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
+        'installed_file: {}, module_path: {}, METADATA file: {}'.format(
+            installed_file_metadata['installed_file'],
+            installed_file_metadata['module_path'],
+            metadata_file_path + '/METADATA'))
+
+    package_metadata = metadata_file_pb2.Metadata()
+    with open(metadata_file_path + '/METADATA', 'rt') as f:
+      text_format.Parse(f.read(), package_metadata)
+
+    if metadata_file_path not in metadata_file_protos:
+      metadata_file_protos[metadata_file_path] = package_metadata
+      if not package_metadata.name:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(
+            f'{metadata_file_path}/METADATA does not have "name"')
+
+      if not package_metadata.third_party.version:
+        report[ISSUE_METADATA_FILE_INCOMPLETE].append(
+            f'{metadata_file_path}/METADATA does not have "third_party.version"')
+
+      for tag in package_metadata.third_party.security.tag:
+        if not tag.startswith(NVD_CPE23):
+          report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
+              f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
+  else:
+    report[ISSUE_NO_METADATA_FILE].append(
+        "installed_file: {}, module_path: {}".format(
+            installed_file_metadata['installed_file'], installed_file_metadata['module_path']))
+
+
+def generate_sbom_for_unbundled():
+  with open(args.metadata, newline='') as sbom_metadata_file:
+    reader = csv.DictReader(sbom_metadata_file)
+    doc = sbom_data.Document(name=args.build_version,
+                             namespace=f'https://www.google.com/sbom/spdx/android/{args.build_version}',
+                             creators=['Organization: ' + args.product_mfr])
+    for installed_file_metadata in reader:
+      installed_file = installed_file_metadata['installed_file']
+      if args.output_file != args.product_out_dir + installed_file + ".spdx":
+        continue
+
+      module_path = installed_file_metadata['module_path']
+      package_id = new_package_id(module_path, PKG_PREBUILT)
+      package = sbom_data.Package(id=package_id,
+                                  name=module_path,
+                                  version=args.build_version,
+                                  supplier='Organization: ' + args.product_mfr)
+      file_id = new_file_id(installed_file)
+      file = sbom_data.File(id=file_id, name=installed_file, checksum=checksum(installed_file))
+      relationship = sbom_data.Relationship(id1=file_id,
+                                            relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                            id2=package_id)
+      doc.add_package(package)
+      doc.files.append(file)
+      doc.describes = file_id
+      doc.add_relationship(relationship)
+      doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+      break
+
+  with open(args.output_file, 'w', encoding="utf-8") as file:
+    sbom_writers.TagValueWriter.write(doc, file, fragment=True)
+
+
+def main():
+  global args
+  args = get_args()
+  log('Args:', vars(args))
+
+  if args.unbundled:
+    generate_sbom_for_unbundled()
+    return
+
+  global metadata_file_protos
+  metadata_file_protos = {}
+
+  doc = sbom_data.Document(name=args.build_version,
+                           namespace=f'https://www.google.com/sbom/spdx/android/{args.build_version}',
+                           creators=['Organization: ' + args.product_mfr])
+
+  product_package = sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
+                                      name=sbom_data.PACKAGE_NAME_PRODUCT,
+                                      version=args.build_version,
+                                      supplier='Organization: ' + args.product_mfr,
+                                      files_analyzed=True)
+  doc.packages.append(product_package)
+
+  doc.packages.append(sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
+                                        name=sbom_data.PACKAGE_NAME_PLATFORM,
+                                        version=args.build_version,
+                                        supplier='Organization: ' + args.product_mfr))
+
+  # Report on some issues and information
+  report = {
+    ISSUE_NO_METADATA: [],
+    ISSUE_NO_METADATA_FILE: [],
+    ISSUE_METADATA_FILE_INCOMPLETE: [],
+    ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
+    ISSUE_INSTALLED_FILE_NOT_EXIST: [],
+    INFO_METADATA_FOUND_FOR_PACKAGE: [],
+  }
+
+  # Scan the metadata in CSV file and create the corresponding package and file records in SPDX
+  with open(args.metadata, newline='') as sbom_metadata_file:
+    reader = csv.DictReader(sbom_metadata_file)
+    for installed_file_metadata in reader:
+      installed_file = installed_file_metadata['installed_file']
+      module_path = installed_file_metadata['module_path']
+      product_copy_files = installed_file_metadata['product_copy_files']
+      kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+
+      if not installed_file_has_metadata(installed_file_metadata, report):
+        continue
+      file_path = args.product_out_dir + '/' + installed_file
+      if not (os.path.islink(file_path) or os.path.isfile(file_path)):
+        report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
+        continue
+
+      file_id = new_file_id(installed_file)
+      doc.files.append(
+        sbom_data.File(id=file_id, name=installed_file, checksum=checksum(installed_file)))
+      product_package.file_ids.append(file_id)
+
+      if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
+        metadata_file_path = get_metadata_file_path(installed_file_metadata)
+        report_metadata_file(metadata_file_path, installed_file_metadata, report)
+
+        # File from source fork packages or prebuilt fork packages
+        external_doc_ref, pkgs, rels = get_sbom_fragments(installed_file_metadata, metadata_file_path)
+        if len(pkgs) > 0:
+          if external_doc_ref:
+            doc.add_external_ref(external_doc_ref)
+          for p in pkgs:
+            doc.add_package(p)
+          for rel in rels:
+            doc.add_relationship(rel)
+          fork_package_id = pkgs[0].id  # The first package should be the source/prebuilt fork package
+          doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                      relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                      id2=fork_package_id))
+      elif module_path or installed_file_metadata['is_platform_generated']:
+        # File from PLATFORM package
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+      elif product_copy_files:
+        # Format of product_copy_files: <source path>:<dest path>
+        src_path = product_copy_files.split(':')[0]
+        # So far product_copy_files are copied from the directories system,
+        # kernel, hardware, frameworks and device, so process them as files
+        # from the PLATFORM package.
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+      elif installed_file.endswith('.fsv_meta'):
+        # See build/make/core/Makefile:2988
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+      elif kernel_module_copy_files.startswith('ANDROID-GEN'):
+        # For the four files generated for _dlkm, _ramdisk partitions
+        # See build/make/core/Makefile:323
+        doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                    id2=sbom_data.SPDXID_PLATFORM))
+
+  product_package.verification_code = generate_package_verification_code(doc.files)
+
+  # Save SBOM records to output file
+  doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+  with open(args.output_file, 'w', encoding="utf-8") as file:
+    sbom_writers.TagValueWriter.write(doc, file)
+  if args.json:
+    with open(args.output_file+'.json', 'w', encoding="utf-8") as file:
+      sbom_writers.JSONWriter.write(doc, file)
+
+
+if __name__ == '__main__':
+  main()
diff --git a/tools/sbom/sbom_data.py b/tools/sbom/sbom_data.py
new file mode 100644
index 0000000..0c380f6
--- /dev/null
+++ b/tools/sbom/sbom_data.py
@@ -0,0 +1,120 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Define data classes that model SBOMs defined by SPDX. The data classes could be
+written out to different SPDX formats (tagvalue, JSON, etc.) with corresponding
+writer utilities.
+
+Refer to the SPDX 2.3 spec: https://spdx.github.io/spdx-spec/v2.3/ and go/android-spdx for
+details of fields in each data class.
+"""
+
+from dataclasses import dataclass, field
+from typing import List
+
+SPDXID_DOC = 'SPDXRef-DOCUMENT'
+SPDXID_PRODUCT = 'SPDXRef-PRODUCT'
+SPDXID_PLATFORM = 'SPDXRef-PLATFORM'
+
+PACKAGE_NAME_PRODUCT = 'PRODUCT'
+PACKAGE_NAME_PLATFORM = 'PLATFORM'
+
+
+class PackageExternalRefCategory:
+  SECURITY = 'SECURITY'
+  PACKAGE_MANAGER = 'PACKAGE-MANAGER'
+  PERSISTENT_ID = 'PERSISTENT-ID'
+  OTHER = 'OTHER'
+
+
+class PackageExternalRefType:
+  cpe22Type = 'cpe22Type'
+  cpe23Type = 'cpe23Type'
+
+
+@dataclass
+class PackageExternalRef:
+  category: PackageExternalRefCategory
+  type: PackageExternalRefType
+  locator: str
+
+
+@dataclass
+class Package:
+  name: str
+  id: str
+  version: str = None
+  supplier: str = None
+  download_location: str = None
+  files_analyzed: bool = False
+  verification_code: str = None
+  file_ids: List[str] = field(default_factory=list)
+  external_refs: List[PackageExternalRef] = field(default_factory=list)
+
+
+@dataclass
+class File:
+  id: str
+  name: str
+  checksum: str
+
+
+class RelationshipType:
+  DESCRIBES = 'DESCRIBES'
+  VARIANT_OF = 'VARIANT_OF'
+  GENERATED_FROM = 'GENERATED_FROM'
+
+
+@dataclass
+class Relationship:
+  id1: str
+  relationship: RelationshipType
+  id2: str
+
+
+@dataclass
+class DocumentExternalReference:
+  id: str
+  uri: str
+  checksum: str
+
+
+@dataclass
+class Document:
+  name: str
+  namespace: str
+  id: str = SPDXID_DOC
+  describes: str = SPDXID_PRODUCT
+  creators: List[str] = field(default_factory=list)
+  created: str = None
+  external_refs: List[DocumentExternalReference] = field(default_factory=list)
+  packages: List[Package] = field(default_factory=list)
+  files: List[File] = field(default_factory=list)
+  relationships: List[Relationship] = field(default_factory=list)
+
+  def add_external_ref(self, external_ref):
+    if not any(external_ref.uri == ref.uri for ref in self.external_refs):
+      self.external_refs.append(external_ref)
+
+  def add_package(self, package):
+    if not any(package.id == p.id for p in self.packages):
+      self.packages.append(package)
+
+  def add_relationship(self, rel):
+    if not any(rel.id1 == r.id1 and rel.id2 == r.id2 and rel.relationship == r.relationship
+               for r in self.relationships):
+      self.relationships.append(rel)
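
A minimal usage sketch of these data classes and the dedup helpers (all values illustrative):

  import sbom_data

  doc = sbom_data.Document(name='example-build',
                           namespace='https://example.com/sbom/example-build',
                           creators=['Organization: Example'])
  platform = sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
                               name=sbom_data.PACKAGE_NAME_PLATFORM)
  doc.add_package(platform)
  doc.add_package(platform)  # ignored: a package with this id already exists
  doc.add_relationship(sbom_data.Relationship(
      id1=sbom_data.SPDXID_DOC,
      relationship=sbom_data.RelationshipType.DESCRIBES,
      id2=sbom_data.SPDXID_PLATFORM))
  assert len(doc.packages) == 1
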
diff --git a/tools/sbom/sbom_writers.py b/tools/sbom/sbom_writers.py
new file mode 100644
index 0000000..66aa6b4
--- /dev/null
+++ b/tools/sbom/sbom_writers.py
@@ -0,0 +1,365 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Serialize objects defined in package sbom_data to SPDX format: tagvalue, JSON.
+"""
+
+import json
+import sbom_data
+
+SPDX_VER = 'SPDX-2.3'
+DATA_LIC = 'CC0-1.0'
+
+
+class Tags:
+  # Common
+  SPDXID = 'SPDXID'
+  SPDX_VERSION = 'SPDXVersion'
+  DATA_LICENSE = 'DataLicense'
+  DOCUMENT_NAME = 'DocumentName'
+  DOCUMENT_NAMESPACE = 'DocumentNamespace'
+  CREATED = 'Created'
+  CREATOR = 'Creator'
+  EXTERNAL_DOCUMENT_REF = 'ExternalDocumentRef'
+
+  # Package
+  PACKAGE_NAME = 'PackageName'
+  PACKAGE_DOWNLOAD_LOCATION = 'PackageDownloadLocation'
+  PACKAGE_VERSION = 'PackageVersion'
+  PACKAGE_SUPPLIER = 'PackageSupplier'
+  FILES_ANALYZED = 'FilesAnalyzed'
+  PACKAGE_VERIFICATION_CODE = 'PackageVerificationCode'
+  PACKAGE_EXTERNAL_REF = 'ExternalRef'
+  # Package license
+  PACKAGE_LICENSE_CONCLUDED = 'PackageLicenseConcluded'
+  PACKAGE_LICENSE_INFO_FROM_FILES = 'PackageLicenseInfoFromFiles'
+  PACKAGE_LICENSE_DECLARED = 'PackageLicenseDeclared'
+  PACKAGE_LICENSE_COMMENTS = 'PackageLicenseComments'
+
+  # File
+  FILE_NAME = 'FileName'
+  FILE_CHECKSUM = 'FileChecksum'
+  # File license
+  FILE_LICENSE_CONCLUDED = 'LicenseConcluded'
+  FILE_LICENSE_INFO_IN_FILE = 'LicenseInfoInFile'
+  FILE_LICENSE_COMMENTS = 'LicenseComments'
+  FILE_COPYRIGHT_TEXT = 'FileCopyrightText'
+  FILE_NOTICE = 'FileNotice'
+  FILE_ATTRIBUTION_TEXT = 'FileAttributionText'
+
+  # Relationship
+  RELATIONSHIP = 'Relationship'
+
+
+class TagValueWriter:
+  @staticmethod
+  def marshal_doc_headers(sbom_doc):
+    headers = [
+      f'{Tags.SPDX_VERSION}: {SPDX_VER}',
+      f'{Tags.DATA_LICENSE}: {DATA_LIC}',
+      f'{Tags.SPDXID}: {sbom_doc.id}',
+      f'{Tags.DOCUMENT_NAME}: {sbom_doc.name}',
+      f'{Tags.DOCUMENT_NAMESPACE}: {sbom_doc.namespace}',
+    ]
+    for creator in sbom_doc.creators:
+      headers.append(f'{Tags.CREATOR}: {creator}')
+    headers.append(f'{Tags.CREATED}: {sbom_doc.created}')
+    for doc_ref in sbom_doc.external_refs:
+      headers.append(
+        f'{Tags.EXTERNAL_DOCUMENT_REF}: {doc_ref.id} {doc_ref.uri} {doc_ref.checksum}')
+    headers.append('')
+    return headers
+
+  @staticmethod
+  def marshal_package(package):
+    download_location = 'NONE'
+    if package.download_location:
+      download_location = package.download_location
+    tagvalues = [
+      f'{Tags.PACKAGE_NAME}: {package.name}',
+      f'{Tags.SPDXID}: {package.id}',
+      f'{Tags.PACKAGE_DOWNLOAD_LOCATION}: {download_location}',
+      f'{Tags.FILES_ANALYZED}: {str(package.files_analyzed).lower()}',
+    ]
+    if package.version:
+      tagvalues.append(f'{Tags.PACKAGE_VERSION}: {package.version}')
+    if package.supplier:
+      tagvalues.append(f'{Tags.PACKAGE_SUPPLIER}: {package.supplier}')
+    if package.verification_code:
+      tagvalues.append(f'{Tags.PACKAGE_VERIFICATION_CODE}: {package.verification_code}')
+    if package.external_refs:
+      for external_ref in package.external_refs:
+        tagvalues.append(
+          f'{Tags.PACKAGE_EXTERNAL_REF}: {external_ref.category} {external_ref.type} {external_ref.locator}')
+
+    tagvalues.append('')
+    return tagvalues
+
+  @staticmethod
+  def marshal_described_element(sbom_doc):
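+    # If the document describes a package, marshal that package together with
+    # the DESCRIBES relationship; if it describes a file, only the relationship
+    # is emitted here because files are marshaled separately.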
+    if not sbom_doc.describes:
+      return None
+
+    product_package = [p for p in sbom_doc.packages if p.id == sbom_doc.describes]
+    if product_package:
+      tagvalues = TagValueWriter.marshal_package(product_package[0])
+      tagvalues.append(
+        f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
+
+      tagvalues.append('')
+      return tagvalues
+
+    file = [f for f in sbom_doc.files if f.id == sbom_doc.describes]
+    if file:
+      tagvalues = [
+        f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}'
+      ]
+
+      return tagvalues
+
+    return None
+
+  @staticmethod
+  def marshal_packages(sbom_doc):
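+    # Packages are emitted in document order, except that a SOURCE package
+    # immediately followed by its UPSTREAM package is emitted as a pair, with
+    # their VARIANT_OF relationship printed right after them.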
+    tagvalues = []
+    marshaled_relationships = []
+    i = 0
+    packages = sbom_doc.packages
+    while i < len(packages):
+      if packages[i].id == sbom_doc.describes:
+        i += 1
+        continue
+
+      if i + 1 < len(packages) \
+          and packages[i].id.startswith('SPDXRef-SOURCE-') \
+          and packages[i + 1].id.startswith('SPDXRef-UPSTREAM-'):
+        tagvalues += TagValueWriter.marshal_package(packages[i])
+        tagvalues += TagValueWriter.marshal_package(packages[i + 1])
+        rel = next((r for r in sbom_doc.relationships if
+                    r.id1 == packages[i].id and
+                    r.id2 == packages[i + 1].id and
+                    r.relationship == sbom_data.RelationshipType.VARIANT_OF), None)
+        if rel:
+          marshaled_relationships.append(rel)
+          tagvalues.append(TagValueWriter.marshal_relationship(rel))
+          tagvalues.append('')
+
+        i += 2
+      else:
+        tagvalues += TagValueWriter.marshal_package(packages[i])
+        i += 1
+
+    return tagvalues, marshaled_relationships
+
+  @staticmethod
+  def marshal_file(file):
+    tagvalues = [
+      f'{Tags.FILE_NAME}: {file.name}',
+      f'{Tags.SPDXID}: {file.id}',
+      f'{Tags.FILE_CHECKSUM}: {file.checksum}',
+      '',
+    ]
+
+    return tagvalues
+
+  @staticmethod
+  def marshal_files(sbom_doc):
+    tagvalues = []
+    for file in sbom_doc.files:
+      tagvalues += TagValueWriter.marshal_file(file)
+    return tagvalues
+
+  @staticmethod
+  def marshal_relationship(rel):
+    return f'{Tags.RELATIONSHIP}: {rel.id1} {rel.relationship} {rel.id2}'
+
+  @staticmethod
+  def marshal_relationships(sbom_doc, marshaled_rels):
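+    # marshaled_rels holds relationships already printed alongside their
+    # SOURCE/UPSTREAM package pair; skip them here to avoid duplicates.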
+    tagvalues = []
+    sorted_rels = sorted(sbom_doc.relationships, key=lambda r: r.id2 + r.id1)
+    for rel in sorted_rels:
+      if any(r.id1 == rel.id1 and r.id2 == rel.id2 and r.relationship == rel.relationship
+             for r in marshaled_rels):
+        continue
+      tagvalues.append(TagValueWriter.marshal_relationship(rel))
+    tagvalues.append('')
+    return tagvalues
+
+  @staticmethod
+  def write(sbom_doc, file, fragment=False):
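+    # With fragment=True only files, packages and relationships are written,
+    # so the output can be merged into another SBOM (e.g. an unbundled APK).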
+    content = []
+    if not fragment:
+      content += TagValueWriter.marshal_doc_headers(sbom_doc)
+      described_element = TagValueWriter.marshal_described_element(sbom_doc)
+      if described_element:
+        content += described_element
+    content += TagValueWriter.marshal_files(sbom_doc)
+    tagvalues, marshaled_relationships = TagValueWriter.marshal_packages(sbom_doc)
+    content += tagvalues
+    content += TagValueWriter.marshal_relationships(sbom_doc, marshaled_relationships)
+    file.write('\n'.join(content))
+
+
+class PropNames:
+  # Common
+  SPDXID = 'SPDXID'
+  SPDX_VERSION = 'spdxVersion'
+  DATA_LICENSE = 'dataLicense'
+  NAME = 'name'
+  DOCUMENT_NAMESPACE = 'documentNamespace'
+  CREATION_INFO = 'creationInfo'
+  CREATORS = 'creators'
+  CREATED = 'created'
+  EXTERNAL_DOCUMENT_REF = 'externalDocumentRefs'
+  DOCUMENT_DESCRIBES = 'documentDescribes'
+  EXTERNAL_DOCUMENT_ID = 'externalDocumentId'
+  EXTERNAL_DOCUMENT_URI = 'spdxDocument'
+  EXTERNAL_DOCUMENT_CHECKSUM = 'checksum'
+  ALGORITHM = 'algorithm'
+  CHECKSUM_VALUE = 'checksumValue'
+
+  # Package
+  PACKAGES = 'packages'
+  PACKAGE_DOWNLOAD_LOCATION = 'downloadLocation'
+  PACKAGE_VERSION = 'versionInfo'
+  PACKAGE_SUPPLIER = 'supplier'
+  FILES_ANALYZED = 'filesAnalyzed'
+  PACKAGE_VERIFICATION_CODE = 'packageVerificationCode'
+  PACKAGE_VERIFICATION_CODE_VALUE = 'packageVerificationCodeValue'
+  PACKAGE_EXTERNAL_REFS = 'externalRefs'
+  PACKAGE_EXTERNAL_REF_CATEGORY = 'referenceCategory'
+  PACKAGE_EXTERNAL_REF_TYPE = 'referenceType'
+  PACKAGE_EXTERNAL_REF_LOCATOR = 'referenceLocator'
+  PACKAGE_HAS_FILES = 'hasFiles'
+
+  # File
+  FILES = 'files'
+  FILE_NAME = 'fileName'
+  FILE_CHECKSUMS = 'checksums'
+
+  # Relationship
+  RELATIONSHIPS = 'relationships'
+  REL_ELEMENT_ID = 'spdxElementId'
+  REL_RELATED_ELEMENT_ID = 'relatedSpdxElement'
+  REL_TYPE = 'relationshipType'
+
+
+class JSONWriter:
+  @staticmethod
+  def marshal_doc_headers(sbom_doc):
+    headers = {
+      PropNames.SPDX_VERSION: SPDX_VER,
+      PropNames.DATA_LICENSE: DATA_LIC,
+      PropNames.SPDXID: sbom_doc.id,
+      PropNames.NAME: sbom_doc.name,
+      PropNames.DOCUMENT_NAMESPACE: sbom_doc.namespace,
+      PropNames.CREATION_INFO: {}
+    }
+    creators = list(sbom_doc.creators)
+    headers[PropNames.CREATION_INFO][PropNames.CREATORS] = creators
+    headers[PropNames.CREATION_INFO][PropNames.CREATED] = sbom_doc.created
+    external_refs = []
+    for doc_ref in sbom_doc.external_refs:
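+      # doc_ref.checksum is an 'ALGORITHM: value' string; split it into the
+      # structured form the JSON output uses.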
+      checksum = doc_ref.checksum.split(': ')
+      external_refs.append({
+        PropNames.EXTERNAL_DOCUMENT_ID: doc_ref.id,
+        PropNames.EXTERNAL_DOCUMENT_URI: doc_ref.uri,
+        PropNames.EXTERNAL_DOCUMENT_CHECKSUM: {
+          PropNames.ALGORITHM: checksum[0],
+          PropNames.CHECKSUM_VALUE: checksum[1]
+        }
+      })
+    if external_refs:
+      headers[PropNames.EXTERNAL_DOCUMENT_REF] = external_refs
+    headers[PropNames.DOCUMENT_DESCRIBES] = [sbom_doc.describes]
+
+    return headers
+
+  @staticmethod
+  def marshal_packages(sbom_doc):
+    packages = []
+    for p in sbom_doc.packages:
+      package = {
+        PropNames.NAME: p.name,
+        PropNames.SPDXID: p.id,
+        PropNames.PACKAGE_DOWNLOAD_LOCATION: p.download_location if p.download_location else 'NONE',
+        PropNames.FILES_ANALYZED: p.files_analyzed
+      }
+      if p.version:
+        package[PropNames.PACKAGE_VERSION] = p.version
+      if p.supplier:
+        package[PropNames.PACKAGE_SUPPLIER] = p.supplier
+      if p.verification_code:
+        package[PropNames.PACKAGE_VERIFICATION_CODE] = {
+          PropNames.PACKAGE_VERIFICATION_CODE_VALUE: p.verification_code
+        }
+      if p.external_refs:
+        package[PropNames.PACKAGE_EXTERNAL_REFS] = []
+        for ref in p.external_refs:
+          ext_ref = {
+            PropNames.PACKAGE_EXTERNAL_REF_CATEGORY: ref.category,
+            PropNames.PACKAGE_EXTERNAL_REF_TYPE: ref.type,
+            PropNames.PACKAGE_EXTERNAL_REF_LOCATOR: ref.locator,
+          }
+          package[PropNames.PACKAGE_EXTERNAL_REFS].append(ext_ref)
+      if p.file_ids:
+        package[PropNames.PACKAGE_HAS_FILES] = []
+        for file_id in p.file_ids:
+          package[PropNames.PACKAGE_HAS_FILES].append(file_id)
+
+      packages.append(package)
+
+    return {PropNames.PACKAGES: packages}
+
+  @staticmethod
+  def marshal_files(sbom_doc):
+    files = []
+    for f in sbom_doc.files:
+      file = {
+        PropNames.FILE_NAME: f.name,
+        PropNames.SPDXID: f.id
+      }
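+      # Same 'ALGORITHM: value' convention as document external refs.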
+      checksum = f.checksum.split(': ')
+      file[PropNames.FILE_CHECKSUMS] = [{
+        PropNames.ALGORITHM: checksum[0],
+        PropNames.CHECKSUM_VALUE: checksum[1],
+      }]
+      files.append(file)
+    return {PropNames.FILES: files}
+
+  @staticmethod
+  def marshal_relationships(sbom_doc):
+    relationships = []
+    sorted_rels = sorted(sbom_doc.relationships, key=lambda r: r.relationship + r.id2 + r.id1)
+    for r in sorted_rels:
+      rel = {
+        PropNames.REL_ELEMENT_ID: r.id1,
+        PropNames.REL_RELATED_ELEMENT_ID: r.id2,
+        PropNames.REL_TYPE: r.relationship,
+      }
+      relationships.append(rel)
+
+    return {PropNames.RELATIONSHIPS: relationships}
+
+  @staticmethod
+  def write(sbom_doc, file):
+    doc = {}
+    doc.update(JSONWriter.marshal_doc_headers(sbom_doc))
+    doc.update(JSONWriter.marshal_packages(sbom_doc))
+    doc.update(JSONWriter.marshal_files(sbom_doc))
+    doc.update(JSONWriter.marshal_relationships(sbom_doc))
+    file.write(json.dumps(doc, indent=4))
diff --git a/tools/sbom/sbom_writers_test.py b/tools/sbom/sbom_writers_test.py
new file mode 100644
index 0000000..4db2bb7
--- /dev/null
+++ b/tools/sbom/sbom_writers_test.py
@@ -0,0 +1,153 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import io
+import pathlib
+import unittest
+import sbom_data
+import sbom_writers
+
+BUILD_FINGER_PRINT = 'build_finger_print'
+SUPPLIER_GOOGLE = 'Organization: Google'
+SUPPLIER_UPSTREAM = 'Organization: upstream'
+
+SPDXID_PREBUILT_PACKAGE1 = 'SPDXRef-PREBUILT-package1'
+SPDXID_SOURCE_PACKAGE1 = 'SPDXRef-SOURCE-package1'
+SPDXID_UPSTREAM_PACKAGE1 = 'SPDXRef-UPSTREAM-package1'
+
+SPDXID_FILE1 = 'SPDXRef-file1'
+SPDXID_FILE2 = 'SPDXRef-file2'
+SPDXID_FILE3 = 'SPDXRef-file3'
+
+
+class SBOMWritersTest(unittest.TestCase):
+
+  def setUp(self):
+    # SBOM of a product
+    self.sbom_doc = sbom_data.Document(name='test doc',
+                                       namespace='http://www.google.com/sbom/spdx/android',
+                                       creators=[SUPPLIER_GOOGLE],
+                                       created='2023-03-31T22:17:58Z',
+                                       describes=sbom_data.SPDXID_PRODUCT)
+    self.sbom_doc.add_external_ref(
+      sbom_data.DocumentExternalReference(id='DocumentRef-external_doc_ref',
+                                          uri='external_doc_uri',
+                                          checksum='SHA1: 1234567890'))
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
+                        name=sbom_data.PACKAGE_NAME_PRODUCT,
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        files_analyzed=True,
+                        verification_code='123456',
+                        file_ids=[SPDXID_FILE1, SPDXID_FILE2, SPDXID_FILE3]))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
+                        name=sbom_data.PACKAGE_NAME_PLATFORM,
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        ))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE1,
+                        name='Prebuilt package1',
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        ))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_SOURCE_PACKAGE1,
+                        name='Source package1',
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT,
+                        external_refs=[sbom_data.PackageExternalRef(
+                          category=sbom_data.PackageExternalRefCategory.SECURITY,
+                          type=sbom_data.PackageExternalRefType.cpe22Type,
+                          locator='cpe:/a:jsoncpp_project:jsoncpp:1.9.4')]
+                        ))
+
+    self.sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_UPSTREAM_PACKAGE1,
+                        name='Upstream package1',
+                        supplier=SUPPLIER_UPSTREAM,
+                        version='1.1',
+                        ))
+
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_SOURCE_PACKAGE1,
+                                                          relationship=sbom_data.RelationshipType.VARIANT_OF,
+                                                          id2=SPDXID_UPSTREAM_PACKAGE1))
+
+    self.sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1', checksum='SHA1: 11111'))
+    self.sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE2, name='/bin/file2', checksum='SHA1: 22222'))
+    self.sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE3, name='/bin/file3', checksum='SHA1: 33333'))
+
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=sbom_data.SPDXID_PLATFORM))
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE2,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=SPDXID_PREBUILT_PACKAGE1))
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE3,
+                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                          id2=SPDXID_SOURCE_PACKAGE1
+                                                          ))
+
+    # SBOM fragment of an APK
+    self.unbundled_sbom_doc = sbom_data.Document(name='test doc',
+                                                 namespace='http://www.google.com/sbom/spdx/android',
+                                                 creators=[SUPPLIER_GOOGLE],
+                                                 created='2023-03-31T22:17:58Z',
+                                                 describes=SPDXID_FILE1)
+
+    self.unbundled_sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1.apk', checksum='SHA1: 11111'))
+    self.unbundled_sbom_doc.add_package(
+      sbom_data.Package(id=SPDXID_SOURCE_PACKAGE1,
+                        name='Unbundled apk package',
+                        supplier=SUPPLIER_GOOGLE,
+                        version=BUILD_FINGER_PRINT))
+    self.unbundled_sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+                                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
+                                                                    id2=SPDXID_SOURCE_PACKAGE1))
+
+  def test_tagvalue_writer(self):
+    with io.StringIO() as output:
+      sbom_writers.TagValueWriter.write(self.sbom_doc, output)
+      expected_output = pathlib.Path('testdata/expected_tagvalue_sbom.spdx').read_text()
+      self.maxDiff = None
+      self.assertEqual(expected_output, output.getvalue())
+
+  def test_tagvalue_writer_unbundled(self):
+    with io.StringIO() as output:
+      sbom_writers.TagValueWriter.write(self.unbundled_sbom_doc, output, fragment=True)
+      expected_output = pathlib.Path('testdata/expected_tagvalue_sbom_unbundled.spdx').read_text()
+      self.maxDiff = None
+      self.assertEqual(expected_output, output.getvalue())
+
+  def test_json_writer(self):
+    with io.StringIO() as output:
+      sbom_writers.JSONWriter.write(self.sbom_doc, output)
+      expected_output = pathlib.Path('testdata/expected_json_sbom.spdx.json').read_text()
+      self.maxDiff = None
+      self.assertEqual(expected_output, output.getvalue())
+
+
+if __name__ == '__main__':
+  unittest.main(verbosity=2)
diff --git a/tools/sbom/testdata/expected_json_sbom.spdx.json b/tools/sbom/testdata/expected_json_sbom.spdx.json
new file mode 100644
index 0000000..628615f
--- /dev/null
+++ b/tools/sbom/testdata/expected_json_sbom.spdx.json
@@ -0,0 +1,137 @@
+{
+    "spdxVersion": "SPDX-2.3",
+    "dataLicense": "CC0-1.0",
+    "SPDXID": "SPDXRef-DOCUMENT",
+    "name": "test doc",
+    "documentNamespace": "http://www.google.com/sbom/spdx/android",
+    "creationInfo": {
+        "creators": [
+            "Organization: Google"
+        ],
+        "created": "2023-03-31T22:17:58Z"
+    },
+    "externalDocumentRefs": [
+        {
+            "externalDocumentId": "DocumentRef-external_doc_ref",
+            "spdxDocument": "external_doc_uri",
+            "checksum": {
+                "algorithm": "SHA1",
+                "checksumValue": "1234567890"
+            }
+        }
+    ],
+    "documentDescribes": [
+        "SPDXRef-PRODUCT"
+    ],
+    "packages": [
+        {
+            "name": "PRODUCT",
+            "SPDXID": "SPDXRef-PRODUCT",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": true,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google",
+            "packageVerificationCode": {
+                "packageVerificationCodeValue": "123456"
+            },
+            "hasFiles": [
+                "SPDXRef-file1",
+                "SPDXRef-file2",
+                "SPDXRef-file3"
+            ]
+        },
+        {
+            "name": "PLATFORM",
+            "SPDXID": "SPDXRef-PLATFORM",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": false,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google"
+        },
+        {
+            "name": "Prebuilt package1",
+            "SPDXID": "SPDXRef-PREBUILT-package1",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": false,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google"
+        },
+        {
+            "name": "Source package1",
+            "SPDXID": "SPDXRef-SOURCE-package1",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": false,
+            "versionInfo": "build_finger_print",
+            "supplier": "Organization: Google",
+            "externalRefs": [
+                {
+                    "referenceCategory": "SECURITY",
+                    "referenceType": "cpe22Type",
+                    "referenceLocator": "cpe:/a:jsoncpp_project:jsoncpp:1.9.4"
+                }
+            ]
+        },
+        {
+            "name": "Upstream package1",
+            "SPDXID": "SPDXRef-UPSTREAM-package1",
+            "downloadLocation": "NONE",
+            "filesAnalyzed": false,
+            "versionInfo": "1.1",
+            "supplier": "Organization: upstream"
+        }
+    ],
+    "files": [
+        {
+            "fileName": "/bin/file1",
+            "SPDXID": "SPDXRef-file1",
+            "checksums": [
+                {
+                    "algorithm": "SHA1",
+                    "checksumValue": "11111"
+                }
+            ]
+        },
+        {
+            "fileName": "/bin/file2",
+            "SPDXID": "SPDXRef-file2",
+            "checksums": [
+                {
+                    "algorithm": "SHA1",
+                    "checksumValue": "22222"
+                }
+            ]
+        },
+        {
+            "fileName": "/bin/file3",
+            "SPDXID": "SPDXRef-file3",
+            "checksums": [
+                {
+                    "algorithm": "SHA1",
+                    "checksumValue": "33333"
+                }
+            ]
+        }
+    ],
+    "relationships": [
+        {
+            "spdxElementId": "SPDXRef-file1",
+            "relatedSpdxElement": "SPDXRef-PLATFORM",
+            "relationshipType": "GENERATED_FROM"
+        },
+        {
+            "spdxElementId": "SPDXRef-file2",
+            "relatedSpdxElement": "SPDXRef-PREBUILT-package1",
+            "relationshipType": "GENERATED_FROM"
+        },
+        {
+            "spdxElementId": "SPDXRef-file3",
+            "relatedSpdxElement": "SPDXRef-SOURCE-package1",
+            "relationshipType": "GENERATED_FROM"
+        },
+        {
+            "spdxElementId": "SPDXRef-SOURCE-package1",
+            "relatedSpdxElement": "SPDXRef-UPSTREAM-package1",
+            "relationshipType": "VARIANT_OF"
+        }
+    ]
+}
\ No newline at end of file
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom.spdx b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
new file mode 100644
index 0000000..0f1c6f8
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
@@ -0,0 +1,65 @@
+SPDXVersion: SPDX-2.3
+DataLicense: CC0-1.0
+SPDXID: SPDXRef-DOCUMENT
+DocumentName: test doc
+DocumentNamespace: http://www.google.com/sbom/spdx/android
+Creator: Organization: Google
+Created: 2023-03-31T22:17:58Z
+ExternalDocumentRef: DocumentRef-external_doc_ref external_doc_uri SHA1: 1234567890
+
+PackageName: PRODUCT
+SPDXID: SPDXRef-PRODUCT
+PackageDownloadLocation: NONE
+FilesAnalyzed: true
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+PackageVerificationCode: 123456
+
+Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-PRODUCT
+
+FileName: /bin/file1
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+FileName: /bin/file2
+SPDXID: SPDXRef-file2
+FileChecksum: SHA1: 22222
+
+FileName: /bin/file3
+SPDXID: SPDXRef-file3
+FileChecksum: SHA1: 33333
+
+PackageName: PLATFORM
+SPDXID: SPDXRef-PLATFORM
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Prebuilt package1
+SPDXID: SPDXRef-PREBUILT-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Source package1
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4
+
+PackageName: Upstream package1
+SPDXID: SPDXRef-UPSTREAM-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: 1.1
+PackageSupplier: Organization: upstream
+
+Relationship: SPDXRef-SOURCE-package1 VARIANT_OF SPDXRef-UPSTREAM-package1
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-PLATFORM
+Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
+Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
new file mode 100644
index 0000000..a00c291
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
@@ -0,0 +1,12 @@
+FileName: /bin/file1.apk
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+PackageName: Unbundled apk package
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-SOURCE-package1
diff --git a/tools/test_post_process_props.py b/tools/test_post_process_props.py
index 236f9ed..439fc9f 100644
--- a/tools/test_post_process_props.py
+++ b/tools/test_post_process_props.py
@@ -256,6 +256,7 @@
     with contextlib.redirect_stderr(stderr_redirect):
       props = PropList("hello")
       props.put("ro.board.first_api_level","25")
+      props.put("ro.build.version.codename", "REL")
 
       # ro.board.first_api_level must be less than or equal to the sdk version
       self.assertFalse(validate_grf_props(props, 20))
@@ -273,5 +274,10 @@
       # ro.board.api_level must be less than or equal to the sdk version
       self.assertFalse(validate_grf_props(props, 25))
 
+      # an api_level above the sdk version is allowed on pre-release (non-REL) builds
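+      # make_as_comment() disables the REL codename set above so the non-REL
+      # value below takes effect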
+      props.get_all_props()[-2].make_as_comment()
+      props.put("ro.build.version.codename", "NonRel")
+      self.assertTrue(validate_grf_props(props, 24))
+
 if __name__ == '__main__':
     unittest.main(verbosity=2)