[automerger skipped] Remove jars from suites that aren't used am: de63375a93 -s ours

am skip reason: Merged-In I01ea97bee5c9502eebda7e09eb617854e1aaf1a8 with SHA-1 0edf97bc00 is already in history

Original change: https://android-review.googlesource.com/c/platform/build/+/3427928

Change-Id: Ie6a9b352f4fdf881014848c7a5b84b9ce94aece1
Signed-off-by: Automerger Merge Worker <android-build-automerger-merge-worker@system.gserviceaccount.com>
diff --git a/OWNERS b/OWNERS
index 97fda40..3209665 100644
--- a/OWNERS
+++ b/OWNERS
@@ -4,4 +4,3 @@
 # round the world timezone coverage.
 per-file envsetup.sh = joeo@google.com, jingwen@google.com, lberki@google.com
 per-file shell_utils.sh = joeo@google.com, jingwen@google.com, lberki@google.com
-
diff --git a/core/Makefile b/core/Makefile
index 4208672..38dc37b 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -4883,7 +4883,9 @@
 endif # INSTALLED_BOOTIMAGE_TARGET
 endif # my_board_extracted_kernel
 
-ifneq ($(my_board_extracted_kernel),true)
+ifeq ($(my_board_extracted_kernel),true)
+$(call dist-for-goals, droid_targets, $(BUILT_KERNEL_VERSION_FILE))
+else
 $(warning Neither INSTALLED_KERNEL_TARGET nor INSTALLED_BOOTIMAGE_TARGET is defined when \
     PRODUCT_OTA_ENFORCE_VINTF_KERNEL_REQUIREMENTS  is true. Information about the updated kernel \
     cannot be built into OTA update package. You can fix this by: \
@@ -6586,6 +6588,14 @@
 ifeq (,$(TARGET_BUILD_UNBUNDLED))
   $(JACOCO_REPORT_CLASSES_ALL): $(INTERNAL_ALLIMAGES_FILES)
 endif
+
+# This is not ideal, but it is difficult to correctly figure out the actual jacoco report
+# jars we need to add here as dependencies. Instead, we add the device-tests as a dependency
+# when the env variable is set, which should guarantee that all the jacoco report jars are
+# ready when we package the final report jar here.
+ifeq ($(JACOCO_PACKAGING_INCLUDE_DEVICE_TESTS),true)
+  $(JACOCO_REPORT_CLASSES_ALL): $(COMPATIBILITY.device-tests.FILES)
+endif
 endif # EMMA_INSTRUMENT=true
 
 
diff --git a/core/OWNERS b/core/OWNERS
index eb1d5c3..dbd7066 100644
--- a/core/OWNERS
+++ b/core/OWNERS
@@ -3,5 +3,5 @@
 per-file proguard*.flags = jdduke@google.com
 
 # For version updates
-per-file version_defaults.mk = aseaton@google.com,lubomir@google.com,pscovanner@google.com,bkhalife@google.com,jainne@google.com
+per-file version_defaults.mk = ankurbakshi@google.com,bkhalife@google.com,jainne@google.com,lokeshgoel@google.com,lubomir@google.com,pscovanner@google.com
 
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index b5f7b22..6bac52b 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -40,7 +40,7 @@
 # Default behavior for the tree wrt building modules or using prebuilts. This
 # can always be overridden by setting the environment variable
 # MODULE_BUILD_FROM_SOURCE.
-BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := false
 
 ifneq ($(SANITIZE_TARGET)$(EMMA_INSTRUMENT_FRAMEWORK),)
   # Always use sources when building the framework with Java coverage or
@@ -49,6 +49,18 @@
   BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
 endif
 
+ifneq ($(CLANG_COVERAGE)$(NATIVE_COVERAGE_PATHS),)
+  # Always use sources when building with clang coverage and native coverage.
+  # It is possible that there are certain situations when building with coverage
+  # would work with prebuilts, e.g. when the coverage is not being applied to
+  # modules for which we provide prebuilts. Unfortunately, determining that
+  # would require embedding knowledge of which coverage paths affect which
+  # modules here. That would duplicate a lot of information, add yet another
+  # location that module authors have to update, and complicate the logic here.
+  # For now we will just always build from sources when doing coverage builds.
+  BRANCH_DEFAULT_MODULE_BUILD_FROM_SOURCE := true
+endif
+
 # ART does not provide linux_bionic variants needed for products that
 # set HOST_CROSS_OS=linux_bionic.
 ifeq (linux_bionic,${HOST_CROSS_OS})
@@ -75,9 +87,9 @@
 
 ifneq (,$(MODULE_BUILD_FROM_SOURCE))
   # Keep an explicit setting.
-else ifeq (,$(filter docs sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES)))
+else ifeq (,$(filter docs sdk win_sdk sdk_addon,$(MAKECMDGOALS))$(findstring com.google.android.conscrypt,$(PRODUCT_PACKAGES))$(findstring com.google.android.go.conscrypt,$(PRODUCT_PACKAGES)))
   # Prebuilt module SDKs require prebuilt modules to work, and currently
-  # prebuilt modules are only provided for com.google.android.xxx. If we can't
+  # prebuilt modules are only provided for com.google.android(.go)?.xxx. If we can't
   # find one of them in PRODUCT_PACKAGES then assume com.android.xxx are in use,
   # and disable prebuilt SDKs. In particular this applies to AOSP builds.
   #
diff --git a/core/build_id.mk b/core/build_id.mk
index 2f9c3f3..8a68056 100644
--- a/core/build_id.mk
+++ b/core/build_id.mk
@@ -18,4 +18,4 @@
 # (like "CRB01").  It must be a single word, and is
 # capitalized by convention.
 
-BUILD_ID=MASTER
+BUILD_ID=UDC
diff --git a/core/main.mk b/core/main.mk
index cb4dca6..04b6b33 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -2157,13 +2157,21 @@
 #       is_kernel_modules_blocklist: modules.blocklist created for _dlkm partitions, see macro build-image-kernel-modules-dir in Makefile.
 #       is_fsverity_build_manifest_apk: BuildManifest<part>.apk files for system and system_ext partition, see ALL_FSVERITY_BUILD_MANIFEST_APK in Makefile.
 #       is_linker_config: see SYSTEM_LINKER_CONFIG and vendor_linker_config_file in Makefile.
+#   build_output_path: the path of the built file, used to calculate its checksum
+#   static_libraries/whole_static_libraries: list of module names of the static libraries the file links against, e.g. libclang_rt.builtins or libclang_rt.builtins_32
+#       Info about the static libraries of all installed files is collected in the variable _all_static_libs, which is used to list all the static library files in sbom-metadata.csv.
+#       See the second foreach loop in the rule for sbom-metadata.csv for details of the static libraries collected in _all_static_libs.
+#   is_static_lib: whether the file is a static library
 
+metadata_list := $(OUT_DIR)/.module_paths/METADATA.list
+metadata_files := $(subst $(newline),$(space),$(file <$(metadata_list)))
 # (TODO: b/272358583 find another way of always rebuilding this target)
 # Remove the sbom-metadata.csv whenever makefile is evaluated
 $(shell rm $(PRODUCT_OUT)/sbom-metadata.csv >/dev/null 2>&1)
-$(PRODUCT_OUT)/sbom-metadata.csv: $(installed_files)
+$(PRODUCT_OUT)/sbom-metadata.csv: $(installed_files) $(metadata_list) $(metadata_files)
 	rm -f $@
-	@echo installed_file$(comma)module_path$(comma)soong_module_type$(comma)is_prebuilt_make_module$(comma)product_copy_files$(comma)kernel_module_copy_files$(comma)is_platform_generated,build_output_path >> $@
+	echo installed_file,module_path,soong_module_type,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,build_output_path,static_libraries,whole_static_libraries,is_static_lib >> $@
+	$(eval _all_static_libs :=)
 	$(foreach f,$(installed_files),\
 	  $(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \
 	  $(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$f)) \
@@ -2185,11 +2193,25 @@
 	  $(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \
 	  $(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \
 	  $(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)) \
-	  @echo /$(_path_on_device)$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated)$(comma)$(_build_output_path) >> $@ $(newline) \
+	  $(eval _static_libs := $(ALL_INSTALLED_FILES.$f.STATIC_LIBRARIES)) \
+	  $(eval _whole_static_libs := $(ALL_INSTALLED_FILES.$f.WHOLE_STATIC_LIBRARIES)) \
+	  $(foreach l,$(_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \
+	  $(foreach l,$(_whole_static_libs),$(eval _all_static_libs += $l:$(strip $(sort $(ALL_MODULES.$l.PATH))):$(strip $(sort $(ALL_MODULES.$l.SOONG_MODULE_TYPE))):$(ALL_STATIC_LIBRARIES.$l.BUILT_FILE))) \
+	  echo /$(_path_on_device),$(_module_path),$(_soong_module_type),$(_is_prebuilt_make_module),$(_product_copy_files),$(_kernel_module_copy_files),$(_is_platform_generated),$(_build_output_path),$(_static_libs),$(_whole_static_libs), >> $@; \
 	  $(if $(_post_installed_dexpreopt_zip), \
-	  for i in $$(zipinfo -1 $(_post_installed_dexpreopt_zip)); do echo /$$i$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated)$(comma)$(PRODUCT_OUT)/$$i >> $@ ; done $(newline) \
+	  for i in $$(zipinfo -1 $(_post_installed_dexpreopt_zip)); do echo /$$i$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated)$(comma)$(PRODUCT_OUT)/$$i$(comma)$(_static_libs)$(comma)$(_whole_static_libs)$(comma) >> $@ ; done ; \
 	  ) \
 	)
+	$(foreach l,$(sort $(_all_static_libs)), \
+	  $(eval _lib_stem := $(call word-colon,1,$l)) \
+	  $(eval _module_path := $(call word-colon,2,$l)) \
+	  $(eval _soong_module_type := $(call word-colon,3,$l)) \
+	  $(eval _built_file := $(call word-colon,4,$l)) \
+	  $(eval _static_libs := $(ALL_STATIC_LIBRARIES.$l.STATIC_LIBRARIES)) \
+	  $(eval _whole_static_libs := $(ALL_STATIC_LIBRARIES.$l.WHOLE_STATIC_LIBRARIES)) \
+	  $(eval _is_static_lib := Y) \
+	  echo $(_lib_stem).a,$(_module_path),$(_soong_module_type),,,,,$(_built_file),$(_static_libs),$(_whole_static_libs),$(_is_static_lib) >> $@; \
+	)
 
 .PHONY: sbom
 ifeq ($(TARGET_BUILD_APPS),)
@@ -2201,17 +2223,47 @@
 
 $(call dist-for-goals,droid,$(PRODUCT_OUT)/sbom.spdx.json:sbom/sbom.spdx.json)
 else
-apps_only_sbom_files := $(sort $(patsubst %,%.spdx.json,$(filter %.apk,$(apps_only_installed_files))))
-$(apps_only_sbom_files): $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
-	rm -rf $@
-	$(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --unbundled_apk
+# Create build rules for generating SBOMs of unbundled APKs and APEXs
+# $1: sbom file
+# $2: sbom fragment file
+# $3: installed file
+# $4: sbom-metadata.csv file
+define generate-app-sbom
+$(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$(3)))
+$(eval _module_name := $(ALL_INSTALLED_FILES.$(3)))
+$(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH))))
+$(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE))))
+$(eval _dep_modules := $(filter %.$(_module_name),$(ALL_MODULES)) $(filter %.$(_module_name)$(TARGET_2ND_ARCH_MODULE_SUFFIX),$(ALL_MODULES)))
+$(eval _is_apex := $(filter %.apex,$(3)))
+
+$(4): $(3) $(metadata_list) $(metadata_files)
+	rm -rf $$@
+	echo installed_file,module_path,soong_module_type,is_prebuilt_make_module,product_copy_files,kernel_module_copy_files,is_platform_generated,build_output_path,static_libraries,whole_static_libraries,is_static_lib >> $$@
+	echo /$(_path_on_device),$(_module_path),$(_soong_module_type),,,,,$(3),,, >> $$@
+	$(if $(filter %.apex,$(3)),\
+	  $(foreach m,$(_dep_modules),\
+	    echo $(patsubst $(PRODUCT_OUT)/apex/$(_module_name)/%,%,$(ALL_MODULES.$m.INSTALLED)),$(sort $(ALL_MODULES.$m.PATH)),$(sort $(ALL_MODULES.$m.SOONG_MODULE_TYPE)),,,,,$(strip $(ALL_MODULES.$m.BUILT)),,, >> $$@;))
+
+$(2): $(1)
+$(1): $(4) $(GEN_SBOM)
+	rm -rf $$@
+	$(GEN_SBOM) --output_file $$@ --metadata $(4) --build_version $$(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --json $(if $(filter %.apk,$(3)),--unbundled_apk,--unbundled_apex)
+endef
+
+apps_only_sbom_files :=
+apps_only_fragment_files :=
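+# For each unbundled APK/APEX in installed_files, instantiate rules that generate its
+# per-app sbom-metadata CSV, its SBOM file, and its SBOM fragment.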
+$(foreach f,$(filter %.apk %.apex,$(installed_files)), \
+  $(eval _metadata_csv_file := $(patsubst %,%-sbom-metadata.csv,$f)) \
+  $(eval _sbom_file := $(patsubst %,%.spdx.json,$f)) \
+  $(eval _fragment_file := $(patsubst %,%-fragment.spdx,$f)) \
+  $(eval apps_only_sbom_files += $(_sbom_file)) \
+  $(eval apps_only_fragment_files += $(_fragment_file)) \
+  $(eval $(call generate-app-sbom,$(_sbom_file),$(_fragment_file),$f,$(_metadata_csv_file))) \
+)
 
 sbom: $(apps_only_sbom_files)
 
-$(foreach f,$(apps_only_sbom_files),$(eval $(patsubst %.spdx.json,%-fragment.spdx,$f): $f))
-apps_only_fragment_files := $(patsubst %.spdx.json,%-fragment.spdx,$(apps_only_sbom_files))
 $(foreach f,$(apps_only_fragment_files),$(eval apps_only_fragment_dist_files += :sbom/$(notdir $f)))
-
 $(foreach f,$(apps_only_sbom_files),$(eval apps_only_sbom_dist_files += :sbom/$(notdir $f)))
 $(call dist-for-goals,apps_only,$(join $(apps_only_sbom_files),$(apps_only_sbom_dist_files)) $(join $(apps_only_fragment_files),$(apps_only_fragment_dist_files)))
 endif
diff --git a/core/robolectric_test_config_template.xml b/core/robolectric_test_config_template.xml
index 56d2312..18f8ec0 100644
--- a/core/robolectric_test_config_template.xml
+++ b/core/robolectric_test_config_template.xml
@@ -33,5 +33,11 @@
         <option name="java-flags" value="--add-opens=java.base/jdk.internal.util.random=ALL-UNNAMED"/>
         <!-- b/251387255 -->
         <option name="java-flags" value="--add-opens=java.base/java.io=ALL-UNNAMED"/>
+        <option name="java-flags" value="-Drobolectric.offline=true"/>
+        <option name="java-flags" value="-Drobolectric.logging=stdout"/>
+        <option name="java-flags" value="-Drobolectric.resourcesMode=binary"/>
+        <option name="java-flags" value="-Drobolectric.usePreinstrumentedJars=false"/>
+        <option name="java-flags" value="-Drobolectric.conscryptMode=OFF"/>
+        <option name="java-flags" value="-Drobolectric.logging.enabled=true"/>
     </test>
 </configuration>
diff --git a/core/sbom.mk b/core/sbom.mk
index e23bbc1..39c251a 100644
--- a/core/sbom.mk
+++ b/core/sbom.mk
@@ -3,9 +3,20 @@
 # unless a .mk file changes its installed file after including base_rules.mk.
 
 ifdef my_register_name
+  # ALL_INSTALLED_FILES.$(installed_file).STATIC_LIBRARIES: list of module names of static libraries, e.g. libc++demangle libclang_rt.builtins, for the primary arch.
+  # ALL_INSTALLED_FILES.$(installed_file).WHOLE_STATIC_LIBRARIES: list of module names of static libraries, e.g. libc++demangle_32 libclang_rt.builtins_32, for the 2nd arch.
   ifneq (, $(strip $(ALL_MODULES.$(my_register_name).INSTALLED)))
     $(foreach installed_file,$(ALL_MODULES.$(my_register_name).INSTALLED),\
       $(eval ALL_INSTALLED_FILES.$(installed_file) := $(my_register_name))\
+      $(eval ALL_INSTALLED_FILES.$(installed_file).STATIC_LIBRARIES := $(foreach l,$(strip $(sort $(LOCAL_STATIC_LIBRARIES))),$l$(if $(LOCAL_2ND_ARCH_VAR_PREFIX),$($(my_prefix)2ND_ARCH_MODULE_SUFFIX))))\
+      $(eval ALL_INSTALLED_FILES.$(installed_file).WHOLE_STATIC_LIBRARIES := $(foreach l,$(strip $(sort $(LOCAL_WHOLE_STATIC_LIBRARIES))),$l$(if $(LOCAL_2ND_ARCH_VAR_PREFIX),$($(my_prefix)2ND_ARCH_MODULE_SUFFIX))))\
     )
   endif
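+  # For static library modules, also record their own static and whole static library
+  # dependencies and, for Soong-built modules, the path of the built .a file.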
+  ifeq (STATIC_LIBRARIES,$(LOCAL_MODULE_CLASS))
+  ALL_STATIC_LIBRARIES.$(my_register_name).STATIC_LIBRARIES := $(foreach l,$(strip $(sort $(LOCAL_STATIC_LIBRARIES))),$l$($(my_prefix)2ND_ARCH_MODULE_SUFFIX))
+  ALL_STATIC_LIBRARIES.$(my_register_name).WHOLE_STATIC_LIBRARIES := $(foreach l,$(strip $(sort $(LOCAL_WHOLE_STATIC_LIBRARIES))),$l$($(my_prefix)2ND_ARCH_MODULE_SUFFIX))
+  ifdef LOCAL_SOONG_MODULE_TYPE
+    ALL_STATIC_LIBRARIES.$(my_register_name).BUILT_FILE := $(LOCAL_PREBUILT_MODULE_FILE)
+  endif
+  endif
 endif
\ No newline at end of file
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 8dc4257..d03cfb1 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -104,7 +104,7 @@
     #  It must be of the form "YYYY-MM-DD" on production devices.
     #  It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
     #  If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-    PLATFORM_SECURITY_PATCH := 2023-08-05
+    PLATFORM_SECURITY_PATCH := 2025-02-01
 endif
 
 include $(BUILD_SYSTEM)/version_util.mk
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 3b97792..9d102ea 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -91,3 +91,6 @@
 # Additional settings used in all GSI builds
 PRODUCT_PRODUCT_PROPERTIES += \
     ro.crypto.metadata_init_delete_all_keys.enabled=false \
+
+# Window Extensions
+$(call inherit-product, $(SRC_TARGET_DIR)/product/window_extensions.mk)
\ No newline at end of file
diff --git a/tools/releasetools/Android.bp b/tools/releasetools/Android.bp
index a76dc8a..d07292a 100644
--- a/tools/releasetools/Android.bp
+++ b/tools/releasetools/Android.bp
@@ -236,6 +236,9 @@
         "rangelib.py",
         "sparse_img.py",
     ],
+    data: [
+        ":zip2zip",
+    ],
     // Only the tools that are referenced directly are listed as required modules. For example,
     // `avbtool` is not here, as the script always uses the one from info_dict['avb_avbtool'].
     required: [
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index d935607..a7b3523 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -241,7 +241,8 @@
     # Signer (minSdkVersion=24, maxSdkVersion=32) certificate SHA-1 digest: 19da94896ce4078c38ca695701f1dec741ec6d67
     # ...
     certs_info = {}
-    certificate_regex = re.compile(r"(Signer (?:#[0-9]+|\(.*\))) (certificate .*):(.*)")
+    certificate_regex = re.compile(
+        r"(Signer (?:#[0-9]+|\(.*\))) (certificate .*):(.*)")
     for line in output.splitlines():
       m = certificate_regex.match(line)
       if not m:
@@ -312,7 +313,7 @@
     # This is the list of wildcards of files we extract from |filename|.
     apk_extensions = ['*.apk', '*.apex']
 
-    with zipfile.ZipFile(filename) as input_zip:
+    with zipfile.ZipFile(filename, "r") as input_zip:
       self.certmap, compressed_extension = common.ReadApkCerts(input_zip)
     if compressed_extension:
       apk_extensions.append('*.apk' + compressed_extension)
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index b50caaa..abedecf 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -35,6 +35,7 @@
 import shutil
 import subprocess
 import sys
+import stat
 import tempfile
 import threading
 import time
@@ -2102,6 +2103,26 @@
     shutil.copyfileobj(in_file, out_file)
 
 
+def UnzipSingleFile(input_zip: zipfile.ZipFile, info: zipfile.ZipInfo, dirname: str):
+  # According to https://stackoverflow.com/questions/434641/how-do-i-set-permissions-attributes-on-a-file-in-a-zip-file-using-pythons-zip/6297838#6297838
+  # the higher bits of |external_attr| hold the unix file permissions and file type.
+  unix_filetype = info.external_attr >> 16
+
+  def CheckMask(a, mask):
+    return (a & mask) == mask
+
+  def IsSymlink(a):
+    return CheckMask(a, stat.S_IFLNK)
+  # The python3.11 zipfile implementation doesn't handle symlinks correctly.
+  if not IsSymlink(unix_filetype):
+    return input_zip.extract(info, dirname)
+  if dirname is None:
+    dirname = os.getcwd()
+  target = os.path.join(dirname, info.filename)
+  os.makedirs(os.path.dirname(target), exist_ok=True)
+  os.symlink(input_zip.read(info).decode(), target)
+
+
 def UnzipToDir(filename, dirname, patterns=None):
   """Unzips the archive to the given directory.
 
@@ -2112,20 +2133,46 @@
         archive. Non-matching patterns will be filtered out. If there's no match
         after the filtering, no file will be unzipped.
   """
-  cmd = ["unzip", "-o", "-q", filename, "-d", dirname]
-  if patterns is not None:
+  with zipfile.ZipFile(filename, allowZip64=True, mode="r") as input_zip:
     # Filter out non-matching patterns. unzip will complain otherwise.
-    with zipfile.ZipFile(filename, allowZip64=True) as input_zip:
-      names = input_zip.namelist()
-    filtered = [
-        pattern for pattern in patterns if fnmatch.filter(names, pattern)]
+    entries = input_zip.infolist()
+    # b/283033491
+    # Per https://en.wikipedia.org/wiki/ZIP_(file_format)#Central_directory_file_header
+    # In zip64 mode, the central directory record's header_offset field might be
+    # set to 0xFFFFFFFF if the header offset is > 2^32. In this case, the extra
+    # fields will contain an 8 byte little endian integer at offset 20
+    # to indicate the actual local header offset.
+    # As of python3.11, python does not handle zip64 central directories
+    # correctly, so we will manually do the parsing here.
 
-    # There isn't any matching files. Don't unzip anything.
-    if not filtered:
-      return
-    cmd.extend(filtered)
+    # The ZIP64 central directory extra field has two required fields:
+    # a 2 byte header ID and a 2 byte size field. These two required fields
+    # have a total size of 4 bytes. They are followed by three 8 byte fields
+    # and then a 4 byte disk number field. The trailing disk number field is
+    # not required to be present, but if it is, the total size of the extra
+    # field will be divisible by 8 (because 2+2+4+8*n is always a multiple of 8).
+    # Most extra fields are optional, but when they appear, they must appear
+    # in the order defined by the zip64 spec. Since the file header offset is
+    # the 2nd to last field in the zip64 spec, it will be in either the last
+    # 8 bytes or in bytes [-12:-4], depending on whether the disk number is present.
+    for entry in entries:
+      if entry.header_offset == 0xFFFFFFFF:
+        if len(entry.extra) % 8 == 0:
+          entry.header_offset = int.from_bytes(entry.extra[-12:-4], "little")
+        else:
+          entry.header_offset = int.from_bytes(entry.extra[-8:], "little")
+    if patterns is not None:
+      filtered = [info for info in entries if any(
+          [fnmatch.fnmatch(info.filename, p) for p in patterns])]
 
-  RunAndCheckOutput(cmd)
+      # There isn't any matching files. Don't unzip anything.
+      if not filtered:
+        return
+      for info in filtered:
+        UnzipSingleFile(input_zip, info, dirname)
+    else:
+      for info in entries:
+        UnzipSingleFile(input_zip, info, dirname)
 
 
 def UnzipTemp(filename, patterns=None):
diff --git a/tools/sbom/generate-sbom.py b/tools/sbom/generate-sbom.py
index 2415f7e..b19be87 100755
--- a/tools/sbom/generate-sbom.py
+++ b/tools/sbom/generate-sbom.py
@@ -332,14 +332,6 @@
   return external_doc_ref, packages, relationships
 
 
-def generate_package_verification_code(files):
-  checksums = [file.checksum for file in files]
-  checksums.sort()
-  h = hashlib.sha1()
-  h.update(''.join(checksums).encode(encoding='utf-8'))
-  return h.hexdigest()
-
-
 def save_report(report_file_path, report):
   with open(report_file_path, 'w', encoding='utf-8') as report_file:
     for type, issues in report.items():
@@ -487,20 +479,32 @@
       product_copy_files = installed_file_metadata['product_copy_files']
       kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
       build_output_path = installed_file_metadata['build_output_path']
+      is_static_lib = installed_file_metadata['is_static_lib']
 
       if not installed_file_has_metadata(installed_file_metadata, report):
         continue
-      if not (os.path.islink(build_output_path) or os.path.isfile(build_output_path)):
+      if not is_static_lib and not (os.path.islink(build_output_path) or os.path.isfile(build_output_path)):
+        # Ignore non-existing static library files for now since they are not shipped on devices.
         report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
         continue
 
       file_id = new_file_id(installed_file)
-      doc.files.append(
-        sbom_data.File(id=file_id, name=installed_file, checksum=checksum(build_output_path)))
-      if not args.unbundled_apex:
-        product_package.file_ids.append(file_id)
-      elif len(doc.files) > 1:
-          doc.add_relationship(sbom_data.Relationship(doc.files[0].id, sbom_data.RelationshipType.CONTAINS, file_id))
+      # TODO(b/285453664): Soong should report the information of statically linked libraries to Make.
+      # The .a file of a static library might not be located correctly when a different sanitized
+      # version of the library is used in linking, because Soong doesn't report that information to Make.
+      # As a workaround, use the following SHA1 checksum for such static libraries created by Soong.
+      sha1 = 'SHA1: da39a3ee5e6b4b0d3255bfef95601890afd80709'  # SHA1 of empty string
+      if os.path.islink(build_output_path) or os.path.isfile(build_output_path):
+        sha1 = checksum(build_output_path)
+      doc.files.append(sbom_data.File(id=file_id,
+                                      name=installed_file,
+                                      checksum=sha1))
+
+      if not is_static_lib:
+        if not args.unbundled_apex:
+          product_package.file_ids.append(file_id)
+        elif len(doc.files) > 1:
+            doc.add_relationship(sbom_data.Relationship(doc.files[0].id, sbom_data.RelationshipType.CONTAINS, file_id))
 
       if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
         metadata_file_path = get_metadata_file_path(installed_file_metadata)
@@ -544,13 +548,21 @@
                                                     relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                     id2=sbom_data.SPDXID_PLATFORM))
 
-  if not args.unbundled_apex:
-    product_package.verification_code = generate_package_verification_code(doc.files)
+      # Process static libraries and whole static libraries the installed file links to
+      static_libs = installed_file_metadata['static_libraries']
+      whole_static_libs = installed_file_metadata['whole_static_libraries']
+      all_static_libs = (static_libs + ' ' + whole_static_libs).strip()
+      if all_static_libs:
+        for lib in all_static_libs.split(' '):
+          doc.add_relationship(sbom_data.Relationship(id1=file_id,
+                                                      relationship=sbom_data.RelationshipType.STATIC_LINK,
+                                                      id2=new_file_id(lib + '.a')))
 
   if args.unbundled_apex:
     doc.describes = doc.files[0].id
 
   # Save SBOM records to output file
+  doc.generate_packages_verification_code()
   doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
   prefix = args.output_file
   if prefix.endswith('.spdx'):
diff --git a/tools/sbom/sbom_data.py b/tools/sbom/sbom_data.py
index 14c4eb2..ea38e36 100644
--- a/tools/sbom/sbom_data.py
+++ b/tools/sbom/sbom_data.py
@@ -25,6 +25,7 @@
 
 from dataclasses import dataclass, field
 from typing import List
+import hashlib
 
 SPDXID_DOC = 'SPDXRef-DOCUMENT'
 SPDXID_PRODUCT = 'SPDXRef-PRODUCT'
@@ -81,6 +82,7 @@
   VARIANT_OF = 'VARIANT_OF'
   GENERATED_FROM = 'GENERATED_FROM'
   CONTAINS = 'CONTAINS'
+  STATIC_LINK = 'STATIC_LINK'
 
 
 @dataclass
@@ -122,3 +124,17 @@
     if not any(rel.id1 == r.id1 and rel.id2 == r.id2 and rel.relationship == r.relationship
                for r in self.relationships):
       self.relationships.append(rel)
+
+  def generate_packages_verification_code(self):
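+    # A package's verification code is the SHA1 of the sorted, concatenated checksums
+    # of the files contained in that package.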
+    for package in self.packages:
+      if not package.file_ids:
+        continue
+
+      checksums = []
+      for file in self.files:
+        if file.id in package.file_ids:
+          checksums.append(file.checksum)
+      checksums.sort()
+      h = hashlib.sha1()
+      h.update(''.join(checksums).encode(encoding='utf-8'))
+      package.verification_code = h.hexdigest()
diff --git a/tools/sbom/sbom_writers.py b/tools/sbom/sbom_writers.py
index 85dee9d..1cb864d 100644
--- a/tools/sbom/sbom_writers.py
+++ b/tools/sbom/sbom_writers.py
@@ -85,7 +85,7 @@
     return headers
 
   @staticmethod
-  def marshal_package(package):
+  def marshal_package(sbom_doc, package, fragment):
     download_location = sbom_data.VALUE_NOASSERTION
     if package.download_location:
       download_location = package.download_location
@@ -107,50 +107,32 @@
           f'{Tags.PACKAGE_EXTERNAL_REF}: {external_ref.category} {external_ref.type} {external_ref.locator}')
 
     tagvalues.append('')
+
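+    # If this package is the document's described element, emit the DESCRIBES relationship
+    # here (unless writing a fragment), then inline the files belonging to this package.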
+    if package.id == sbom_doc.describes and not fragment:
+      tagvalues.append(
+          f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
+      tagvalues.append('')
+
+    for file in sbom_doc.files:
+      if file.id in package.file_ids:
+        tagvalues += TagValueWriter.marshal_file(file)
+
     return tagvalues
 
   @staticmethod
-  def marshal_described_element(sbom_doc, fragment):
-    if not sbom_doc.describes:
-      return None
-
-    product_package = [p for p in sbom_doc.packages if p.id == sbom_doc.describes]
-    if product_package:
-      tagvalues = TagValueWriter.marshal_package(product_package[0])
-      if not fragment:
-        tagvalues.append(
-            f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
-
-      tagvalues.append('')
-      return tagvalues
-
-    file = [f for f in sbom_doc.files if f.id == sbom_doc.describes]
-    if file:
-      tagvalues = TagValueWriter.marshal_file(file[0])
-      if not fragment:
-        tagvalues.append(
-            f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
-
-      return tagvalues
-
-    return None
-
-  @staticmethod
-  def marshal_packages(sbom_doc):
+  def marshal_packages(sbom_doc, fragment):
     tagvalues = []
     marshaled_relationships = []
     i = 0
     packages = sbom_doc.packages
     while i < len(packages):
-      if packages[i].id == sbom_doc.describes:
-        i += 1
-        continue
-
-      if i + 1 < len(packages) \
-          and packages[i].id.startswith('SPDXRef-SOURCE-') \
-          and packages[i + 1].id.startswith('SPDXRef-UPSTREAM-'):
-        tagvalues += TagValueWriter.marshal_package(packages[i])
-        tagvalues += TagValueWriter.marshal_package(packages[i + 1])
+      if (i + 1 < len(packages)
+          and packages[i].id.startswith('SPDXRef-SOURCE-')
+          and packages[i + 1].id.startswith('SPDXRef-UPSTREAM-')):
+        # Output SOURCE and UPSTREAM packages and their VARIANT_OF relationship together, so they are
+        # close to each other in SBOMs in tagvalue format.
+        tagvalues += TagValueWriter.marshal_package(sbom_doc, packages[i], fragment)
+        tagvalues += TagValueWriter.marshal_package(sbom_doc, packages[i + 1], fragment)
         rel = next((r for r in sbom_doc.relationships if
                     r.id1 == packages[i].id and
                     r.id2 == packages[i + 1].id and
@@ -162,7 +144,7 @@
 
         i += 2
       else:
-        tagvalues += TagValueWriter.marshal_package(packages[i])
+        tagvalues += TagValueWriter.marshal_package(sbom_doc, packages[i], fragment)
         i += 1
 
     return tagvalues, marshaled_relationships
@@ -179,12 +161,20 @@
     return tagvalues
 
   @staticmethod
-  def marshal_files(sbom_doc):
+  def marshal_files(sbom_doc, fragment):
     tagvalues = []
+    files_in_packages = []
+    for package in sbom_doc.packages:
+      files_in_packages += package.file_ids
     for file in sbom_doc.files:
-      if file.id == sbom_doc.describes:
+      if file.id in files_in_packages:
         continue
       tagvalues += TagValueWriter.marshal_file(file)
+      if file.id == sbom_doc.describes and not fragment:
+        # A fragment is not a full SBOM document, so the DESCRIBES relationship is not applicable.
+        tagvalues.append(
+            f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
+        tagvalues.append('')
     return tagvalues
 
   @staticmethod
@@ -208,11 +198,8 @@
     content = []
     if not fragment:
       content += TagValueWriter.marshal_doc_headers(sbom_doc)
-    described_element = TagValueWriter.marshal_described_element(sbom_doc, fragment)
-    if described_element:
-      content += described_element
-    content += TagValueWriter.marshal_files(sbom_doc)
-    tagvalues, marshaled_relationships = TagValueWriter.marshal_packages(sbom_doc)
+    content += TagValueWriter.marshal_files(sbom_doc, fragment)
+    tagvalues, marshaled_relationships = TagValueWriter.marshal_packages(sbom_doc, fragment)
     content += tagvalues
     content += TagValueWriter.marshal_relationships(sbom_doc, marshaled_relationships)
     file.write('\n'.join(content))
diff --git a/tools/sbom/sbom_writers_test.py b/tools/sbom/sbom_writers_test.py
index 361dae6..cf85e01 100644
--- a/tools/sbom/sbom_writers_test.py
+++ b/tools/sbom/sbom_writers_test.py
@@ -31,6 +31,7 @@
 SPDXID_FILE1 = 'SPDXRef-file1'
 SPDXID_FILE2 = 'SPDXRef-file2'
 SPDXID_FILE3 = 'SPDXRef-file3'
+SPDXID_FILE4 = 'SPDXRef-file4'
 
 
 class SBOMWritersTest(unittest.TestCase):
@@ -101,6 +102,8 @@
       sbom_data.File(id=SPDXID_FILE2, name='/bin/file2', checksum='SHA1: 22222'))
     self.sbom_doc.files.append(
       sbom_data.File(id=SPDXID_FILE3, name='/bin/file3', checksum='SHA1: 33333'))
+    self.sbom_doc.files.append(
+      sbom_data.File(id=SPDXID_FILE4, name='file4.a', checksum='SHA1: 44444'))
 
     self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
                                                           relationship=sbom_data.RelationshipType.GENERATED_FROM,
@@ -112,6 +115,10 @@
                                                           relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                           id2=SPDXID_SOURCE_PACKAGE1
                                                           ))
+    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+                                                          relationship=sbom_data.RelationshipType.STATIC_LINK,
+                                                          id2=SPDXID_FILE4
+                                                          ))
 
     # SBOM fragment of an APK
     self.unbundled_sbom_doc = sbom_data.Document(name='test doc',
@@ -139,6 +146,14 @@
       self.maxDiff = None
       self.assertEqual(expected_output, output.getvalue())
 
+  def test_tagvalue_writer_doc_describes_file(self):
+    with io.StringIO() as output:
+      self.sbom_doc.describes = SPDXID_FILE4
+      sbom_writers.TagValueWriter.write(self.sbom_doc, output)
+      expected_output = pathlib.Path('testdata/expected_tagvalue_sbom_doc_describes_file.spdx').read_text()
+      self.maxDiff = None
+      self.assertEqual(expected_output, output.getvalue())
+
   def test_tagvalue_writer_unbundled(self):
     with io.StringIO() as output:
       sbom_writers.TagValueWriter.write(self.unbundled_sbom_doc, output, fragment=True)
diff --git a/tools/sbom/testdata/expected_json_sbom.spdx.json b/tools/sbom/testdata/expected_json_sbom.spdx.json
index 32715a5..53936c5 100644
--- a/tools/sbom/testdata/expected_json_sbom.spdx.json
+++ b/tools/sbom/testdata/expected_json_sbom.spdx.json
@@ -110,6 +110,16 @@
                     "checksumValue": "33333"
                 }
             ]
+        },
+        {
+            "fileName": "file4.a",
+            "SPDXID": "SPDXRef-file4",
+            "checksums": [
+                {
+                    "algorithm": "SHA1",
+                    "checksumValue": "44444"
+                }
+            ]
         }
     ],
     "relationships": [
@@ -129,6 +139,11 @@
             "relationshipType": "GENERATED_FROM"
         },
         {
+            "spdxElementId": "SPDXRef-file1",
+            "relatedSpdxElement": "SPDXRef-file4",
+            "relationshipType": "STATIC_LINK"
+        },
+        {
             "spdxElementId": "SPDXRef-SOURCE-package1",
             "relatedSpdxElement": "SPDXRef-UPSTREAM-package1",
             "relationshipType": "VARIANT_OF"
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom.spdx b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
index ee39e82..e6fd17e 100644
--- a/tools/sbom/testdata/expected_tagvalue_sbom.spdx
+++ b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
@@ -7,6 +7,10 @@
 Created: 2023-03-31T22:17:58Z
 ExternalDocumentRef: DocumentRef-external_doc_ref external_doc_uri SHA1: 1234567890
 
+FileName: file4.a
+SPDXID: SPDXRef-file4
+FileChecksum: SHA1: 44444
+
 PackageName: PRODUCT
 SPDXID: SPDXRef-PRODUCT
 PackageDownloadLocation: NONE
@@ -63,3 +67,4 @@
 Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-PLATFORM
 Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
 Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
+Relationship: SPDXRef-file1 STATIC_LINK SPDXRef-file4
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx b/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx
new file mode 100644
index 0000000..428d7e3
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom_doc_describes_file.spdx
@@ -0,0 +1,70 @@
+SPDXVersion: SPDX-2.3
+DataLicense: CC0-1.0
+SPDXID: SPDXRef-DOCUMENT
+DocumentName: test doc
+DocumentNamespace: http://www.google.com/sbom/spdx/android
+Creator: Organization: Google
+Created: 2023-03-31T22:17:58Z
+ExternalDocumentRef: DocumentRef-external_doc_ref external_doc_uri SHA1: 1234567890
+
+FileName: file4.a
+SPDXID: SPDXRef-file4
+FileChecksum: SHA1: 44444
+
+Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-file4
+
+PackageName: PRODUCT
+SPDXID: SPDXRef-PRODUCT
+PackageDownloadLocation: NONE
+FilesAnalyzed: true
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+PackageVerificationCode: 123456
+
+FileName: /bin/file1
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+FileName: /bin/file2
+SPDXID: SPDXRef-file2
+FileChecksum: SHA1: 22222
+
+FileName: /bin/file3
+SPDXID: SPDXRef-file3
+FileChecksum: SHA1: 33333
+
+PackageName: PLATFORM
+SPDXID: SPDXRef-PLATFORM
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Prebuilt package1
+SPDXID: SPDXRef-PREBUILT-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Source package1
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4
+
+PackageName: Upstream package1
+SPDXID: SPDXRef-UPSTREAM-package1
+PackageDownloadLocation: NOASSERTION
+FilesAnalyzed: false
+PackageVersion: 1.1
+PackageSupplier: Organization: upstream
+
+Relationship: SPDXRef-SOURCE-package1 VARIANT_OF SPDXRef-UPSTREAM-package1
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-PLATFORM
+Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
+Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
+Relationship: SPDXRef-file1 STATIC_LINK SPDXRef-file4