Merge "Finalization script fixes."
diff --git a/core/main.mk b/core/main.mk
index df42baa..e84dfaa 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -1252,6 +1252,7 @@
$(if $(filter tests,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_TESTS)) \
$(if $(filter asan,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_ASAN)) \
$(if $(filter java_coverage,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE)) \
+ $(if $(filter arm64,$(TARGET_ARCH) $(TARGET_2ND_ARCH)),$(call get-product-var,$(1),PRODUCT_PACKAGES_ARM64)) \
$(call auto-included-modules) \
) \
$(eval ### Filter out the overridden packages and executables before doing expansion) \
diff --git a/core/product.mk b/core/product.mk
index 5f1e145..cdc3d09 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -36,6 +36,7 @@
_product_list_vars += PRODUCT_PACKAGES
_product_list_vars += PRODUCT_PACKAGES_DEBUG
_product_list_vars += PRODUCT_PACKAGES_DEBUG_ASAN
+_product_list_vars += PRODUCT_PACKAGES_ARM64
# Packages included only for eng/userdebug builds, when building with EMMA_INSTRUMENT=true
_product_list_vars += PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE
_product_list_vars += PRODUCT_PACKAGES_ENG
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 94b5c16..0f5b8a4 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -296,11 +296,9 @@
system_manifest.xml \
system_compatibility_matrix.xml \
-# HWASAN runtime for SANITIZE_TARGET=hwaddress builds
-ifneq (,$(filter hwaddress,$(SANITIZE_TARGET)))
- PRODUCT_PACKAGES += \
- libclang_rt.hwasan.bootstrap
-endif
+PRODUCT_PACKAGES_ARM64 := libclang_rt.hwasan \
+ libclang_rt.hwasan.bootstrap \
+ libc_hwasan \
# Jacoco agent JARS to be built and installed, if any.
ifeq ($(EMMA_INSTRUMENT),true)
diff --git a/tools/Android.bp b/tools/Android.bp
index e325f6b..bea0602 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -71,22 +71,6 @@
}
python_binary_host {
- name: "generate-sbom",
- srcs: [
- "generate-sbom.py",
- ],
- version: {
- py3: {
- embedded_launcher: true,
- },
- },
- libs: [
- "metadata_file_proto_py",
- "libprotobuf-python",
- ],
-}
-
-python_binary_host {
name: "list_files",
main: "list_files.py",
srcs: [
diff --git a/tools/generate-sbom.py b/tools/generate-sbom.py
deleted file mode 100755
index 9583395..0000000
--- a/tools/generate-sbom.py
+++ /dev/null
@@ -1,690 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Generate the SBOM of the current target product in SPDX format.
-Usage example:
- generate-sbom.py --output_file out/target/product/vsoc_x86_64/sbom.spdx \
- --metadata out/target/product/vsoc_x86_64/sbom-metadata.csv \
- --product_out_dir=out/target/product/vsoc_x86_64 \
- --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
- --product_mfr=Google
-"""
-
-import argparse
-import csv
-import datetime
-import google.protobuf.text_format as text_format
-import hashlib
-import json
-import os
-import metadata_file_pb2
-
-# Common
-SPDXID = 'SPDXID'
-SPDX_VERSION = 'SPDXVersion'
-DATA_LICENSE = 'DataLicense'
-DOCUMENT_NAME = 'DocumentName'
-DOCUMENT_NAMESPACE = 'DocumentNamespace'
-CREATED = 'Created'
-CREATOR = 'Creator'
-EXTERNAL_DOCUMENT_REF = 'ExternalDocumentRef'
-
-# Package
-PACKAGE_NAME = 'PackageName'
-PACKAGE_DOWNLOAD_LOCATION = 'PackageDownloadLocation'
-PACKAGE_VERSION = 'PackageVersion'
-PACKAGE_SUPPLIER = 'PackageSupplier'
-FILES_ANALYZED = 'FilesAnalyzed'
-PACKAGE_VERIFICATION_CODE = 'PackageVerificationCode'
-PACKAGE_EXTERNAL_REF = 'ExternalRef'
-# Package license
-PACKAGE_LICENSE_CONCLUDED = 'PackageLicenseConcluded'
-PACKAGE_LICENSE_INFO_FROM_FILES = 'PackageLicenseInfoFromFiles'
-PACKAGE_LICENSE_DECLARED = 'PackageLicenseDeclared'
-PACKAGE_LICENSE_COMMENTS = 'PackageLicenseComments'
-
-# File
-FILE_NAME = 'FileName'
-FILE_CHECKSUM = 'FileChecksum'
-# File license
-FILE_LICENSE_CONCLUDED = 'LicenseConcluded'
-FILE_LICENSE_INFO_IN_FILE = 'LicenseInfoInFile'
-FILE_LICENSE_COMMENTS = 'LicenseComments'
-FILE_COPYRIGHT_TEXT = 'FileCopyrightText'
-FILE_NOTICE = 'FileNotice'
-FILE_ATTRIBUTION_TEXT = 'FileAttributionText'
-
-# Relationship
-RELATIONSHIP = 'Relationship'
-REL_DESCRIBES = 'DESCRIBES'
-REL_VARIANT_OF = 'VARIANT_OF'
-REL_GENERATED_FROM = 'GENERATED_FROM'
-
-# Package type
-PKG_SOURCE = 'SOURCE'
-PKG_UPSTREAM = 'UPSTREAM'
-PKG_PREBUILT = 'PREBUILT'
-
-# Security tag
-NVD_CPE23 = 'NVD-CPE2.3:'
-
-# Report
-ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
-ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
-ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
-ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
-ISSUE_INSTALLED_FILE_NOT_EXIST = 'Non-exist installed files:'
-INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
-
-
-def get_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
- parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
- parser.add_argument('--metadata', required=True, help='The SBOM metadata file path.')
- parser.add_argument('--product_out_dir', required=True, help='The parent directory of all the installed files.')
- parser.add_argument('--build_version', required=True, help='The build version.')
- parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
- parser.add_argument('--json', action='store_true', default=False, help='Generated SBOM file in SPDX JSON format')
- parser.add_argument('--unbundled', action='store_true', default=False, help='Generate SBOM file for unbundled module')
-
- return parser.parse_args()
-
-
-def log(*info):
- if args.verbose:
- for i in info:
- print(i)
-
-
-def new_doc_header(doc_id):
- return {
- SPDX_VERSION: 'SPDX-2.3',
- DATA_LICENSE: 'CC0-1.0',
- SPDXID: doc_id,
- DOCUMENT_NAME: args.build_version,
- DOCUMENT_NAMESPACE: f'https://www.google.com/sbom/spdx/android/{args.build_version}',
- CREATOR: 'Organization: Google, LLC',
- CREATED: '<timestamp>',
- EXTERNAL_DOCUMENT_REF: [],
- }
-
-
-def new_package_record(id, name, version, supplier, download_location=None, files_analyzed='false', external_refs=[]):
- package = {
- PACKAGE_NAME: name,
- SPDXID: id,
- PACKAGE_DOWNLOAD_LOCATION: download_location if download_location else 'NONE',
- FILES_ANALYZED: files_analyzed,
- }
- if version:
- package[PACKAGE_VERSION] = version
- if supplier:
- package[PACKAGE_SUPPLIER] = f'Organization: {supplier}'
- if external_refs:
- package[PACKAGE_EXTERNAL_REF] = external_refs
-
- return package
-
-
-def new_file_record(id, name, checksum):
- return {
- FILE_NAME: name,
- SPDXID: id,
- FILE_CHECKSUM: checksum
- }
-
-
-def encode_for_spdxid(s):
- """Simple encode for string values used in SPDXID which uses the charset of A-Za-Z0-9.-"""
- result = ''
- for c in s:
- if c.isalnum() or c in '.-':
- result += c
- elif c in '_@/':
- result += '-'
- else:
- result += '0x' + c.encode('utf-8').hex()
-
- return result.lstrip('-')
-
-
-def new_package_id(package_name, type):
- return f'SPDXRef-{type}-{encode_for_spdxid(package_name)}'
-
-
-def new_external_doc_ref(package_name, sbom_url, sbom_checksum):
- doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{encode_for_spdxid(package_name)}'
- return f'{EXTERNAL_DOCUMENT_REF}: {doc_ref_id} {sbom_url} {sbom_checksum}', doc_ref_id
-
-
-def new_file_id(file_path):
- return f'SPDXRef-{encode_for_spdxid(file_path)}'
-
-
-def new_relationship_record(id1, relationship, id2):
- return f'{RELATIONSHIP}: {id1} {relationship} {id2}'
-
-
-def checksum(file_path):
- file_path = args.product_out_dir + '/' + file_path
- h = hashlib.sha1()
- if os.path.islink(file_path):
- h.update(os.readlink(file_path).encode('utf-8'))
- else:
- with open(file_path, 'rb') as f:
- h.update(f.read())
- return f'SHA1: {h.hexdigest()}'
-
-
-def is_soong_prebuilt_module(file_metadata):
- return file_metadata['soong_module_type'] and file_metadata['soong_module_type'] in [
- 'android_app_import', 'android_library_import', 'cc_prebuilt_binary', 'cc_prebuilt_library',
- 'cc_prebuilt_library_headers', 'cc_prebuilt_library_shared', 'cc_prebuilt_library_static', 'cc_prebuilt_object',
- 'dex_import', 'java_import', 'java_sdk_library_import', 'java_system_modules_import',
- 'libclang_rt_prebuilt_library_static', 'libclang_rt_prebuilt_library_shared', 'llvm_prebuilt_library_static',
- 'ndk_prebuilt_object', 'ndk_prebuilt_shared_stl', 'nkd_prebuilt_static_stl', 'prebuilt_apex',
- 'prebuilt_bootclasspath_fragment', 'prebuilt_dsp', 'prebuilt_firmware', 'prebuilt_kernel_modules',
- 'prebuilt_rfsa', 'prebuilt_root', 'rust_prebuilt_dylib', 'rust_prebuilt_library', 'rust_prebuilt_rlib',
- 'vndk_prebuilt_shared',
-
- # 'android_test_import',
- # 'cc_prebuilt_test_library_shared',
- # 'java_import_host',
- # 'java_test_import',
- # 'llvm_host_prebuilt_library_shared',
- # 'prebuilt_apis',
- # 'prebuilt_build_tool',
- # 'prebuilt_defaults',
- # 'prebuilt_etc',
- # 'prebuilt_etc_host',
- # 'prebuilt_etc_xml',
- # 'prebuilt_font',
- # 'prebuilt_hidl_interfaces',
- # 'prebuilt_platform_compat_config',
- # 'prebuilt_stubs_sources',
- # 'prebuilt_usr_share',
- # 'prebuilt_usr_share_host',
- # 'soong_config_module_type_import',
- ]
-
-
-def is_source_package(file_metadata):
- module_path = file_metadata['module_path']
- return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)
-
-
-def is_prebuilt_package(file_metadata):
- module_path = file_metadata['module_path']
- if module_path:
- return (module_path.startswith('prebuilts/') or
- is_soong_prebuilt_module(file_metadata) or
- file_metadata['is_prebuilt_make_module'])
-
- kernel_module_copy_files = file_metadata['kernel_module_copy_files']
- if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
- return True
-
- return False
-
-
-def get_source_package_info(file_metadata, metadata_file_path):
- if not metadata_file_path:
- return file_metadata['module_path'], []
-
- metadata_proto = metadata_file_protos[metadata_file_path]
- external_refs = []
- for tag in metadata_proto.third_party.security.tag:
- if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
- external_refs.append(f'{PACKAGE_EXTERNAL_REF}: SECURITY cpe23Type {tag.removeprefix(NVD_CPE23)}')
- elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
- external_refs.append(f'{PACKAGE_EXTERNAL_REF}: SECURITY cpe22Type {tag.removeprefix(NVD_CPE23)}')
-
- if metadata_proto.name:
- return metadata_proto.name, external_refs
- else:
- return os.path.basename(metadata_file_path), external_refs # return the directory name only as package name
-
-
-def get_prebuilt_package_name(file_metadata, metadata_file_path):
- name = None
- if metadata_file_path:
- metadata_proto = metadata_file_protos[metadata_file_path]
- if metadata_proto.name:
- name = metadata_proto.name
- else:
- name = metadata_file_path
- elif file_metadata['module_path']:
- name = file_metadata['module_path']
- elif file_metadata['kernel_module_copy_files']:
- src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
- name = os.path.dirname(src_path)
-
- return name.removeprefix('prebuilts/').replace('/', '-')
-
-
-def get_metadata_file_path(file_metadata):
- metadata_path = ''
- if file_metadata['module_path']:
- metadata_path = file_metadata['module_path']
- elif file_metadata['kernel_module_copy_files']:
- metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])
-
- while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
- metadata_path = os.path.dirname(metadata_path)
-
- return metadata_path
-
-
-def get_package_version(metadata_file_path):
- if not metadata_file_path:
- return None
- metadata_proto = metadata_file_protos[metadata_file_path]
- return metadata_proto.third_party.version
-
-
-def get_package_homepage(metadata_file_path):
- if not metadata_file_path:
- return None
- metadata_proto = metadata_file_protos[metadata_file_path]
- if metadata_proto.third_party.homepage:
- return metadata_proto.third_party.homepage
- for url in metadata_proto.third_party.url:
- if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
- return url.value
-
- return None
-
-
-def get_package_download_location(metadata_file_path):
- if not metadata_file_path:
- return None
- metadata_proto = metadata_file_protos[metadata_file_path]
- if metadata_proto.third_party.url:
- urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
- if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
- return urls[0].value
- elif len(urls) > 1:
- return urls[1].value
-
- return None
-
-
-def get_sbom_fragments(installed_file_metadata, metadata_file_path):
- external_doc_ref = None
- packages = []
- relationships = []
-
- # Info from METADATA file
- homepage = get_package_homepage(metadata_file_path)
- version = get_package_version(metadata_file_path)
- download_location = get_package_download_location(metadata_file_path)
-
- if is_source_package(installed_file_metadata):
- # Source fork packages
- name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
- source_package_id = new_package_id(name, PKG_SOURCE)
- source_package = new_package_record(source_package_id, name, args.build_version, args.product_mfr,
- external_refs=external_refs)
-
- upstream_package_id = new_package_id(name, PKG_UPSTREAM)
- upstream_package = new_package_record(upstream_package_id, name, version, homepage, download_location)
- packages += [source_package, upstream_package]
- relationships.append(new_relationship_record(source_package_id, REL_VARIANT_OF, upstream_package_id))
- elif is_prebuilt_package(installed_file_metadata):
- # Prebuilt fork packages
- name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
- prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
- prebuilt_package = new_package_record(prebuilt_package_id, name, args.build_version, args.product_mfr)
- packages.append(prebuilt_package)
-
- if metadata_file_path:
- metadata_proto = metadata_file_protos[metadata_file_path]
- if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
- sbom_url = metadata_proto.third_party.sbom_ref.url
- sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
- upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
- if sbom_url and sbom_checksum and upstream_element_id:
- external_doc_ref, doc_ref_id = new_external_doc_ref(name, sbom_url, sbom_checksum)
- relationships.append(
- new_relationship_record(prebuilt_package_id, REL_VARIANT_OF, doc_ref_id + ':' + upstream_element_id))
-
- return external_doc_ref, packages, relationships
-
-
-def generate_package_verification_code(files):
- checksums = [file[FILE_CHECKSUM] for file in files]
- checksums.sort()
- h = hashlib.sha1()
- h.update(''.join(checksums).encode(encoding='utf-8'))
- return h.hexdigest()
-
-
-def write_record(f, record):
- if record.__class__.__name__ == 'dict':
- for k, v in record.items():
- if k == EXTERNAL_DOCUMENT_REF or k == PACKAGE_EXTERNAL_REF:
- for ref in v:
- f.write(ref + '\n')
- else:
- f.write('{}: {}\n'.format(k, v))
- elif record.__class__.__name__ == 'str':
- f.write(record + '\n')
- f.write('\n')
-
-
-def write_tagvalue_sbom(all_records):
- with open(args.output_file, 'w', encoding="utf-8") as output_file:
- for rec in all_records:
- write_record(output_file, rec)
-
-
-def write_json_sbom(all_records, product_package_id):
- doc = {}
- product_package = None
- for r in all_records:
- if r.__class__.__name__ == 'dict':
- if DOCUMENT_NAME in r: # Doc header
- doc['spdxVersion'] = r[SPDX_VERSION]
- doc['dataLicense'] = r[DATA_LICENSE]
- doc[SPDXID] = r[SPDXID]
- doc['name'] = r[DOCUMENT_NAME]
- doc['documentNamespace'] = r[DOCUMENT_NAMESPACE]
- doc['creationInfo'] = {
- 'creators': [r[CREATOR]],
- 'created': r[CREATED],
- }
- doc['externalDocumentRefs'] = []
- for ref in r[EXTERNAL_DOCUMENT_REF]:
- # ref is 'ExternalDocumentRef: <doc id> <doc url> SHA1: xxxxx'
- fields = ref.split(' ')
- doc_ref = {
- 'externalDocumentId': fields[1],
- 'spdxDocument': fields[2],
- 'checksum': {
- 'algorithm': fields[3][:-1],
- 'checksumValue': fields[4]
- }
- }
- doc['externalDocumentRefs'].append(doc_ref)
- doc['documentDescribes'] = []
- doc['packages'] = []
- doc['files'] = []
- doc['relationships'] = []
-
- elif PACKAGE_NAME in r: # packages
- package = {
- 'name': r[PACKAGE_NAME],
- SPDXID: r[SPDXID],
- 'downloadLocation': r[PACKAGE_DOWNLOAD_LOCATION],
- 'filesAnalyzed': r[FILES_ANALYZED] == "true"
- }
- if PACKAGE_VERSION in r:
- package['versionInfo'] = r[PACKAGE_VERSION]
- if PACKAGE_SUPPLIER in r:
- package['supplier'] = r[PACKAGE_SUPPLIER]
- if PACKAGE_VERIFICATION_CODE in r:
- package['packageVerificationCode'] = {
- 'packageVerificationCodeValue': r[PACKAGE_VERIFICATION_CODE]
- }
- if PACKAGE_EXTERNAL_REF in r:
- package['externalRefs'] = []
- for ref in r[PACKAGE_EXTERNAL_REF]:
- # ref is 'ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4'
- fields = ref.split(' ')
- ext_ref = {
- 'referenceCategory': fields[1],
- 'referenceType': fields[2],
- 'referenceLocator': fields[3],
- }
- package['externalRefs'].append(ext_ref)
-
- doc['packages'].append(package)
- if r[SPDXID] == product_package_id:
- product_package = package
- product_package['hasFiles'] = []
-
- elif FILE_NAME in r: # files
- file = {
- 'fileName': r[FILE_NAME],
- SPDXID: r[SPDXID]
- }
- checksum = r[FILE_CHECKSUM].split(': ')
- file['checksums'] = [{
- 'algorithm': checksum[0],
- 'checksumValue': checksum[1],
- }]
- doc['files'].append(file)
- product_package['hasFiles'].append(r[SPDXID])
-
- elif r.__class__.__name__ == 'str':
- if r.startswith(RELATIONSHIP):
- # r is 'Relationship: <spdxid> <relationship> <spdxid>'
- fields = r.split(' ')
- rel = {
- 'spdxElementId': fields[1],
- 'relatedSpdxElement': fields[3],
- 'relationshipType': fields[2],
- }
- if fields[2] == REL_DESCRIBES:
- doc['documentDescribes'].append(fields[3])
- else:
- doc['relationships'].append(rel)
-
- with open(args.output_file + '.json', 'w', encoding="utf-8") as output_file:
- output_file.write(json.dumps(doc, indent=4))
-
-
-def save_report(report):
- prefix, _ = os.path.splitext(args.output_file)
- with open(prefix + '-gen-report.txt', 'w', encoding='utf-8') as report_file:
- for type, issues in report.items():
- report_file.write(type + '\n')
- for issue in issues:
- report_file.write('\t' + issue + '\n')
- report_file.write('\n')
-
-
-def sort_rels(rel):
- # rel = 'Relationship file_id GENERATED_FROM package_id'
- fields = rel.split(' ')
- return fields[3] + fields[1]
-
-
-# Validate the metadata generated by Make for installed files and report if there is no metadata.
-def installed_file_has_metadata(installed_file_metadata, report):
- installed_file = installed_file_metadata['installed_file']
- module_path = installed_file_metadata['module_path']
- product_copy_files = installed_file_metadata['product_copy_files']
- kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
- is_platform_generated = installed_file_metadata['is_platform_generated']
-
- if (not module_path and
- not product_copy_files and
- not kernel_module_copy_files and
- not is_platform_generated and
- not installed_file.endswith('.fsv_meta')):
- report[ISSUE_NO_METADATA].append(installed_file)
- return False
-
- return True
-
-
-def report_metadata_file(metadata_file_path, installed_file_metadata, report):
- if metadata_file_path:
- report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
- 'installed_file: {}, module_path: {}, METADATA file: {}'.format(
- installed_file_metadata['installed_file'],
- installed_file_metadata['module_path'],
- metadata_file_path + '/METADATA'))
-
- package_metadata = metadata_file_pb2.Metadata()
- with open(metadata_file_path + '/METADATA', 'rt') as f:
- text_format.Parse(f.read(), package_metadata)
-
- if not metadata_file_path in metadata_file_protos:
- metadata_file_protos[metadata_file_path] = package_metadata
- if not package_metadata.name:
- report[ISSUE_METADATA_FILE_INCOMPLETE].append(f'{metadata_file_path}/METADATA does not has "name"')
-
- if not package_metadata.third_party.version:
- report[ISSUE_METADATA_FILE_INCOMPLETE].append(
- f'{metadata_file_path}/METADATA does not has "third_party.version"')
-
- for tag in package_metadata.third_party.security.tag:
- if not tag.startswith(NVD_CPE23):
- report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
- f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
- else:
- report[ISSUE_NO_METADATA_FILE].append(
- "installed_file: {}, module_path: {}".format(
- installed_file_metadata['installed_file'], installed_file_metadata['module_path']))
-
-
-def generate_fragment():
- with open(args.metadata, newline='') as sbom_metadata_file:
- reader = csv.DictReader(sbom_metadata_file)
- for installed_file_metadata in reader:
- installed_file = installed_file_metadata['installed_file']
- if args.output_file != args.product_out_dir + installed_file + ".spdx":
- continue
-
- module_path = installed_file_metadata['module_path']
- package_id = new_package_id(encode_for_spdxid(module_path), PKG_PREBUILT)
- package = new_package_record(package_id, module_path, args.build_version, args.product_mfr)
- file_id = new_file_id(installed_file)
- file = new_file_record(file_id, installed_file, checksum(installed_file))
- relationship = new_relationship_record(file_id, REL_GENERATED_FROM, package_id)
- records = [package, file, relationship]
- write_tagvalue_sbom(records)
- break
-
-
-def main():
- global args
- args = get_args()
- log('Args:', vars(args))
-
- if args.unbundled:
- generate_fragment()
- return
-
- global metadata_file_protos
- metadata_file_protos = {}
-
- doc_id = 'SPDXRef-DOCUMENT'
- doc_header = new_doc_header(doc_id)
-
- product_package_id = 'SPDXRef-PRODUCT'
- product_package = new_package_record(product_package_id, 'PRODUCT', args.build_version, args.product_mfr,
- files_analyzed='true')
-
- platform_package_id = 'SPDXRef-PLATFORM'
- platform_package = new_package_record(platform_package_id, 'PLATFORM', args.build_version, args.product_mfr)
-
- # Report on some issues and information
- report = {
- ISSUE_NO_METADATA: [],
- ISSUE_NO_METADATA_FILE: [],
- ISSUE_METADATA_FILE_INCOMPLETE: [],
- ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
- ISSUE_INSTALLED_FILE_NOT_EXIST: [],
- INFO_METADATA_FOUND_FOR_PACKAGE: [],
- }
-
- # Scan the metadata in CSV file and create the corresponding package and file records in SPDX
- product_files = []
- package_ids = []
- package_records = []
- rels_file_gen_from = []
- with open(args.metadata, newline='') as sbom_metadata_file:
- reader = csv.DictReader(sbom_metadata_file)
- for installed_file_metadata in reader:
- installed_file = installed_file_metadata['installed_file']
- module_path = installed_file_metadata['module_path']
- product_copy_files = installed_file_metadata['product_copy_files']
- kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
-
- if not installed_file_has_metadata(installed_file_metadata, report):
- continue
- file_path = args.product_out_dir + '/' + installed_file
- if not (os.path.islink(file_path) or os.path.isfile(file_path)):
- report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
- continue
-
- file_id = new_file_id(installed_file)
- product_files.append(new_file_record(file_id, installed_file, checksum(installed_file)))
-
- if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
- metadata_file_path = get_metadata_file_path(installed_file_metadata)
- report_metadata_file(metadata_file_path, installed_file_metadata, report)
-
- # File from source fork packages or prebuilt fork packages
- external_doc_ref, pkgs, rels = get_sbom_fragments(installed_file_metadata, metadata_file_path)
- if len(pkgs) > 0:
- if external_doc_ref and external_doc_ref not in doc_header[EXTERNAL_DOCUMENT_REF]:
- doc_header[EXTERNAL_DOCUMENT_REF].append(external_doc_ref)
- for p in pkgs:
- if not p[SPDXID] in package_ids:
- package_ids.append(p[SPDXID])
- package_records.append(p)
- for rel in rels:
- if not rel in package_records:
- package_records.append(rel)
- fork_package_id = pkgs[0][SPDXID] # The first package should be the source/prebuilt fork package
- rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, fork_package_id))
- elif module_path or installed_file_metadata['is_platform_generated']:
- # File from PLATFORM package
- rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
- elif product_copy_files:
- # Format of product_copy_files: <source path>:<dest path>
- src_path = product_copy_files.split(':')[0]
- # So far product_copy_files are copied from directory system, kernel, hardware, frameworks and device,
- # so process them as files from PLATFORM package
- rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
- elif installed_file.endswith('.fsv_meta'):
- # See build/make/core/Makefile:2988
- rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
- elif kernel_module_copy_files.startswith('ANDROID-GEN'):
- # For the four files generated for _dlkm, _ramdisk partitions
- # See build/make/core/Makefile:323
- rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
-
- product_package[PACKAGE_VERIFICATION_CODE] = generate_package_verification_code(product_files)
-
- all_records = [
- doc_header,
- product_package,
- new_relationship_record(doc_id, REL_DESCRIBES, product_package_id),
- ]
- all_records += product_files
- all_records.append(platform_package)
- all_records += package_records
- rels_file_gen_from.sort(key=sort_rels)
- all_records += rels_file_gen_from
-
- # Save SBOM records to output file
- doc_header[CREATED] = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
- write_tagvalue_sbom(all_records)
- if args.json:
- write_json_sbom(all_records, product_package_id)
-
- save_report(report)
-
-
-if __name__ == '__main__':
- main()
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index e154a0f..e0bcbb7 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -842,13 +842,14 @@
SYSTEM/ after rebuilding recovery.
"""
common.ZipDelete(zip_filename, files_list)
- with zipfile.ZipFile(zip_filename, "a",
+ output_zip = zipfile.ZipFile(zip_filename, "a",
compression=zipfile.ZIP_DEFLATED,
- allowZip64=True) as output_zip:
- for item in files_list:
- file_path = os.path.join(OPTIONS.input_tmp, item)
- assert os.path.exists(file_path)
- common.ZipWrite(output_zip, file_path, arcname=item)
+ allowZip64=True)
+ for item in files_list:
+ file_path = os.path.join(OPTIONS.input_tmp, item)
+ assert os.path.exists(file_path)
+ common.ZipWrite(output_zip, file_path, arcname=item)
+ common.ZipClose(output_zip)
def HasPartition(partition_name):
@@ -1191,7 +1192,7 @@
AddVbmetaDigest(output_zip)
if output_zip:
- output_zip.close()
+ common.ZipClose(output_zip)
if OPTIONS.replace_updated_files_list:
ReplaceUpdatedFiles(output_zip.filename,
OPTIONS.replace_updated_files_list)
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 40f7c92..59c712e 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -431,7 +431,7 @@
apex_zip = zipfile.ZipFile(apex_file, 'a', allowZip64=True)
common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
common.ZipWrite(apex_zip, payload_public_key, arcname=APEX_PUBKEY)
- apex_zip.close()
+ common.ZipClose(apex_zip)
# 3. Sign the APEX container with container_key.
signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
@@ -626,7 +626,7 @@
if os.path.isfile(deapexer_path):
deapexer = deapexer_path
- for apex_filename in os.listdir(target_dir):
+ for apex_filename in sorted(os.listdir(target_dir)):
apex_filepath = os.path.join(target_dir, apex_filename)
if not os.path.isfile(apex_filepath) or \
not zipfile.is_zipfile(apex_filepath):
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 97957be..b395c19 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -142,7 +142,7 @@
"""Verifies the payload and metadata signatures in an A/B OTA payload."""
package_zip = zipfile.ZipFile(package, 'r', allowZip64=True)
if 'payload.bin' not in package_zip.namelist():
- package_zip.close()
+ common.ZipClose(package_zip)
return
print('Verifying A/B OTA payload signatures...')
@@ -160,7 +160,7 @@
'--in_file=' + payload_file,
'--public_key=' + pubkey]
common.RunAndCheckOutput(cmd)
- package_zip.close()
+ common.ZipClose(package_zip)
# Verified successfully upon reaching here.
print('\nPayload signatures VERIFIED\n\n')
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 3904a78..01bc6e1 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -461,6 +461,25 @@
return system_prop and system_prop.GetProp("ro.build.version.release") == "11"
@property
+ def vendor_api_level(self):
+ vendor_prop = self.info_dict.get("vendor.build.prop")
+ if not vendor_prop:
+ return -1
+
+ props = [
+ "ro.board.api_level",
+ "ro.board.first_api_level",
+ "ro.product.first_api_level",
+ ]
+ for prop in props:
+ value = vendor_prop.GetProp(prop)
+ try:
+ return int(value)
+ except:
+ pass
+ return -1
+
+ @property
def is_vabc_xor(self):
vendor_prop = self.info_dict.get("vendor.build.prop")
vabc_xor_enabled = vendor_prop and \
@@ -2809,6 +2828,18 @@
def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
compress_type=None):
+ # http://b/18015246
+ # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
+ # for files larger than 2GiB. We can work around this by adjusting their
+ # limit. Note that `zipfile.writestr()` will not work for strings larger than
+ # 2GiB. The Python interpreter sometimes rejects strings that large (though
+ # it isn't clear to me exactly what circumstances cause this).
+ # `zipfile.write()` must be used directly to work around this.
+ #
+ # This mess can be avoided if we port to python3.
+ saved_zip64_limit = zipfile.ZIP64_LIMIT
+ zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
if compress_type is None:
compress_type = zip_file.compression
if arcname is None:
@@ -2834,13 +2865,14 @@
finally:
os.chmod(filename, saved_stat.st_mode)
os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
+ zipfile.ZIP64_LIMIT = saved_zip64_limit
def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
compress_type=None):
"""Wrap zipfile.writestr() function to work around the zip64 limit.
- Python's zip implementation won't allow writing a string
+ Even with the ZIP64_LIMIT workaround, it won't allow writing a string
longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
when calling crc32(bytes).
@@ -2849,6 +2881,9 @@
when we know the string won't be too long.
"""
+ saved_zip64_limit = zipfile.ZIP64_LIMIT
+ zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
zinfo.compress_type = zip_file.compression
@@ -2881,6 +2916,7 @@
zinfo.date_time = (2009, 1, 1, 0, 0, 0)
zip_file.writestr(zinfo, data)
+ zipfile.ZIP64_LIMIT = saved_zip64_limit
def ZipDelete(zip_filename, entries, force=False):
@@ -2913,6 +2949,18 @@
os.replace(new_zipfile, zip_filename)
+def ZipClose(zip_file):
+ # http://b/18015246
+ # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
+ # central directory.
+ saved_zip64_limit = zipfile.ZIP64_LIMIT
+ zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
+ zip_file.close()
+
+ zipfile.ZIP64_LIMIT = saved_zip64_limit
+
+
class DeviceSpecificParams(object):
module = None
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index 8f93688..ba2b14f 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -165,6 +165,19 @@
pass
+def include_meta_in_list(item_list):
+ """Include all `META/*` files in the item list.
+
+ To ensure that `AddImagesToTargetFiles` can still be used with vendor item
+ list that do not specify all of the required META/ files, those files should
+ be included by default. This preserves the backward compatibility of
+ `rebuild_image_with_sepolicy`.
+ """
+ if not item_list:
+ return None
+ return list(item_list) + ['META/*']
+
+
def create_merged_package(temp_dir):
"""Merges two target files packages into one target files structure.
@@ -276,7 +289,7 @@
merge_utils.CollectTargetFiles(
input_zipfile_or_dir=OPTIONS.vendor_target_files,
output_dir=vendor_target_files_dir,
- item_list=OPTIONS.vendor_item_list)
+ item_list=include_meta_in_list(OPTIONS.vendor_item_list))
# Copy the partition contents from the merged target-files archive to the
# vendor target-files archive.
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 7078d67..c4fd809 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -272,7 +272,7 @@
# We haven't written the metadata entry, which will be done in
# FinalizeMetadata.
- output_zip.close()
+ common.ZipClose(output_zip)
needed_property_files = (
NonAbOtaPropertyFiles(),
@@ -526,7 +526,7 @@
# We haven't written the metadata entry yet, which will be handled in
# FinalizeMetadata().
- output_zip.close()
+ common.ZipClose(output_zip)
# Sign the generated zip package unless no_signing is specified.
needed_property_files = (
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 2458244..df283d6 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -495,7 +495,7 @@
else:
common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
- target_zip.close()
+ common.ZipClose(target_zip)
return target_file
@@ -632,7 +632,7 @@
# TODO(xunchang) handle META/postinstall_config.txt'
- partial_target_zip.close()
+ common.ZipClose(partial_target_zip)
return partial_target_file
@@ -717,7 +717,7 @@
# Write new ab_partitions.txt file
common.ZipWrite(target_zip, new_ab_partitions, arcname=AB_PARTITIONS)
- target_zip.close()
+ common.ZipClose(target_zip)
return target_file
@@ -909,6 +909,19 @@
logger.info(
"VABC Compression algorithm is set to 'none', disabling VABC xor")
OPTIONS.enable_vabc_xor = False
+
+ if OPTIONS.enable_vabc_xor:
+ api_level = -1
+ if source_info is not None:
+ api_level = source_info.vendor_api_level
+ if api_level == -1:
+ api_level = target_info.vendor_api_level
+
+ # XOR is only supported on T and higher.
+ if api_level < 33:
+ logger.error("VABC XOR not supported on this vendor, disabling")
+ OPTIONS.enable_vabc_xor = False
+
additional_args = []
# Prepare custom images.
@@ -1052,11 +1065,11 @@
common.ZipWriteStr(output_zip, "apex_info.pb", ota_apex_info,
compress_type=zipfile.ZIP_STORED)
- target_zip.close()
+ common.ZipClose(target_zip)
# We haven't written the metadata entry yet, which will be handled in
# FinalizeMetadata().
- output_zip.close()
+ common.ZipClose(output_zip)
FinalizeMetadata(metadata, staging_file, output_file,
package_key=OPTIONS.package_key)
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 8c26114..985aeda 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -22,7 +22,7 @@
import ota_metadata_pb2
import common
-from common import (ZipDelete, OPTIONS, MakeTempFile,
+from common import (ZipDelete, ZipClose, OPTIONS, MakeTempFile,
ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
GetRamdiskFormat, ParseUpdateEngineConfig)
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 4a12e74..8291448 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -908,7 +908,7 @@
certs_zip = zipfile.ZipFile(temp_file, "w", allowZip64=True)
for k in keys:
common.ZipWrite(certs_zip, k)
- certs_zip.close()
+ common.ZipClose(certs_zip)
common.ZipWriteStr(output_zip, filename, temp_file.getvalue())
@@ -1545,8 +1545,8 @@
platform_api_level, codename_to_api_level_map,
compressed_extension)
- input_zip.close()
- output_zip.close()
+ common.ZipClose(input_zip)
+ common.ZipClose(output_zip)
if OPTIONS.vendor_partitions and OPTIONS.vendor_otatools:
BuildVendorPartitions(args[1])
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 0e4626b..2dfd8c7 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -222,17 +222,17 @@
info_dict = copy.deepcopy(self.TEST_INFO_FINGERPRINT_DICT)
build_info = common.BuildInfo(info_dict)
self.assertEqual(
- 'product-brand/product-name/product-device:version-release/build-id/'
- 'version-incremental:build-type/build-tags', build_info.fingerprint)
+ 'product-brand/product-name/product-device:version-release/build-id/'
+ 'version-incremental:build-type/build-tags', build_info.fingerprint)
build_props = info_dict['build.prop'].build_props
del build_props['ro.build.id']
build_props['ro.build.legacy.id'] = 'legacy-build-id'
build_info = common.BuildInfo(info_dict, use_legacy_id=True)
self.assertEqual(
- 'product-brand/product-name/product-device:version-release/'
- 'legacy-build-id/version-incremental:build-type/build-tags',
- build_info.fingerprint)
+ 'product-brand/product-name/product-device:version-release/'
+ 'legacy-build-id/version-incremental:build-type/build-tags',
+ build_info.fingerprint)
self.assertRaises(common.ExternalError, common.BuildInfo, info_dict, None,
False)
@@ -241,9 +241,9 @@
info_dict['vbmeta_digest'] = 'abcde12345'
build_info = common.BuildInfo(info_dict, use_legacy_id=False)
self.assertEqual(
- 'product-brand/product-name/product-device:version-release/'
- 'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
- build_info.fingerprint)
+ 'product-brand/product-name/product-device:version-release/'
+ 'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
+ build_info.fingerprint)
def test___getitem__(self):
target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
@@ -376,7 +376,7 @@
info_dict['build.prop'].build_props[
'ro.product.property_source_order'] = 'bad-source'
with self.assertRaisesRegexp(common.ExternalError,
- 'Invalid ro.product.property_source_order'):
+ 'Invalid ro.product.property_source_order'):
info = common.BuildInfo(info_dict, None)
info.GetBuildProp('ro.product.device')
@@ -461,7 +461,7 @@
os.utime(test_file_name, (1234567, 1234567))
expected_stat = os.stat(test_file_name)
common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
- zip_file.close()
+ common.ZipClose(zip_file)
self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
test_file_name, expected_stat, expected_mode,
@@ -494,7 +494,7 @@
expected_mode = extra_args.get("perms", zinfo_perms)
common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
- zip_file.close()
+ common.ZipClose(zip_file)
self._verify(zip_file, zip_file_name, arcname, sha1(contents).hexdigest(),
expected_mode=expected_mode,
@@ -538,7 +538,7 @@
common.ZipWrite(zip_file, test_file_name, **extra_args)
common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
- zip_file.close()
+ common.ZipClose(zip_file)
# Verify the contents written by ZipWrite().
self._verify(zip_file, zip_file_name, arcname_large,
@@ -553,6 +553,12 @@
os.remove(zip_file_name)
os.remove(test_file_name)
+ def _test_reset_ZIP64_LIMIT(self, func, *args):
+ default_limit = (1 << 31) - 1
+ self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
+ func(*args)
+ self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
+
def test_ZipWrite(self):
file_contents = os.urandom(1024)
self._test_ZipWrite(file_contents)
@@ -577,7 +583,7 @@
})
def test_ZipWrite_resets_ZIP64_LIMIT(self):
- self._test_ZipWrite("")
+ self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")
def test_ZipWriteStr(self):
random_string = os.urandom(1024)
@@ -628,9 +634,9 @@
})
def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
- self._test_ZipWriteStr('foo', b'')
+ self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
zinfo = zipfile.ZipInfo(filename="foo")
- self._test_ZipWriteStr(zinfo, b'')
+ self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, b'')
def test_bug21309935(self):
zip_file = tempfile.NamedTemporaryFile(delete=False)
@@ -652,7 +658,7 @@
zinfo = zipfile.ZipInfo(filename="qux")
zinfo.external_attr = 0o700 << 16
common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
- zip_file.close()
+ common.ZipClose(zip_file)
self._verify(zip_file, zip_file_name, "foo",
sha1(random_string).hexdigest(),
@@ -679,7 +685,7 @@
common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
- output_zip.close()
+ common.ZipClose(output_zip)
zip_file.close()
try:
@@ -727,8 +733,8 @@
common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
- output_zip.close()
- output_zip.close()
+ common.ZipClose(output_zip)
+ common.ZipClose(output_zip)
return zip_file
@test_utils.SkipIfExternalToolsUnavailable()
@@ -815,9 +821,9 @@
)
APKCERTS_CERTMAP1 = {
- 'RecoveryLocalizer.apk': 'certs/devkey',
- 'Settings.apk': 'build/make/target/product/security/platform',
- 'TV.apk': 'PRESIGNED',
+ 'RecoveryLocalizer.apk' : 'certs/devkey',
+ 'Settings.apk' : 'build/make/target/product/security/platform',
+ 'TV.apk' : 'PRESIGNED',
}
APKCERTS_TXT2 = (
@@ -832,10 +838,10 @@
)
APKCERTS_CERTMAP2 = {
- 'Compressed1.apk': 'certs/compressed1',
- 'Compressed2a.apk': 'certs/compressed2',
- 'Compressed2b.apk': 'certs/compressed2',
- 'Compressed3.apk': 'certs/compressed3',
+ 'Compressed1.apk' : 'certs/compressed1',
+ 'Compressed2a.apk' : 'certs/compressed2',
+ 'Compressed2b.apk' : 'certs/compressed2',
+ 'Compressed3.apk' : 'certs/compressed3',
}
APKCERTS_TXT3 = (
@@ -844,7 +850,7 @@
)
APKCERTS_CERTMAP3 = {
- 'Compressed4.apk': 'certs/compressed4',
+ 'Compressed4.apk' : 'certs/compressed4',
}
# Test parsing with no optional fields, both optional fields, and only the
@@ -861,9 +867,9 @@
)
APKCERTS_CERTMAP4 = {
- 'RecoveryLocalizer.apk': 'certs/devkey',
- 'Settings.apk': 'build/make/target/product/security/platform',
- 'TV.apk': 'PRESIGNED',
+ 'RecoveryLocalizer.apk' : 'certs/devkey',
+ 'Settings.apk' : 'build/make/target/product/security/platform',
+ 'TV.apk' : 'PRESIGNED',
}
def setUp(self):
@@ -967,7 +973,7 @@
extracted_from_privkey = common.ExtractAvbPublicKey('avbtool', privkey)
extracted_from_pubkey = common.ExtractAvbPublicKey('avbtool', pubkey)
with open(extracted_from_privkey, 'rb') as privkey_fp, \
- open(extracted_from_pubkey, 'rb') as pubkey_fp:
+ open(extracted_from_pubkey, 'rb') as pubkey_fp:
self.assertEqual(privkey_fp.read(), pubkey_fp.read())
def test_ParseCertificate(self):
@@ -1231,8 +1237,7 @@
self.assertEqual(
'1-5 9-10',
sparse_image.file_map['//system/file1'].extra['text_str'])
- self.assertTrue(
- sparse_image.file_map['//system/file2'].extra['incomplete'])
+ self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
self.assertTrue(
sparse_image.file_map['/system/app/file3'].extra['incomplete'])
@@ -1340,7 +1345,7 @@
'recovery_api_version': 3,
'fstab_version': 2,
'system_root_image': 'true',
- 'no_recovery': 'true',
+ 'no_recovery' : 'true',
'recovery_as_boot': 'true',
}
@@ -1661,7 +1666,6 @@
self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
test_file.name, 'generic_kernel')
-
class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
"""Checks the format of install-recovery.sh.
@@ -1671,7 +1675,7 @@
def setUp(self):
self._tempdir = common.MakeTempDir()
# Create a fake dict that contains the fstab info for boot&recovery.
- self._info = {"fstab": {}}
+ self._info = {"fstab" : {}}
fake_fstab = [
"/dev/soc.0/by-name/boot /boot emmc defaults defaults",
"/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
@@ -2018,11 +2022,11 @@
input_zip, 'odm', placeholder_values)
self.assertEqual({
- 'ro.odm.build.date.utc': '1578430045',
- 'ro.odm.build.fingerprint':
- 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
- 'ro.product.odm.device': 'coral',
- 'ro.product.odm.name': 'product1',
+ 'ro.odm.build.date.utc': '1578430045',
+ 'ro.odm.build.fingerprint':
+ 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+ 'ro.product.odm.device': 'coral',
+ 'ro.product.odm.name': 'product1',
}, partition_props.build_props)
with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
@@ -2205,8 +2209,8 @@
copied_props = copy.deepcopy(partition_props)
self.assertEqual({
- 'ro.odm.build.date.utc': '1578430045',
- 'ro.odm.build.fingerprint':
- 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
- 'ro.product.odm.device': 'coral',
+ 'ro.odm.build.date.utc': '1578430045',
+ 'ro.odm.build.fingerprint':
+ 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+ 'ro.product.odm.device': 'coral',
}, copied_props.build_props)
diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp
new file mode 100644
index 0000000..f6c0190
--- /dev/null
+++ b/tools/sbom/Android.bp
@@ -0,0 +1,53 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+python_binary_host {
+ name: "generate-sbom",
+ srcs: [
+ "generate-sbom.py",
+ ],
+ version: {
+ py3: {
+ embedded_launcher: true,
+ },
+ },
+ libs: [
+ "metadata_file_proto_py",
+ "libprotobuf-python",
+ "sbom_lib",
+ ],
+}
+
+python_library_host {
+ name: "sbom_lib",
+ srcs: [
+ "sbom_data.py",
+ "sbom_writers.py",
+ ],
+}
+
+python_test_host {
+ name: "sbom_writers_test",
+ main: "sbom_writers_test.py",
+ srcs: [
+ "sbom_writers_test.py",
+ ],
+ data: [
+ "testdata/*",
+ ],
+ libs: [
+ "sbom_lib",
+ ],
+ test_suites: ["general-tests"],
+}
diff --git a/tools/sbom/generate-sbom.py b/tools/sbom/generate-sbom.py
new file mode 100755
index 0000000..192061e
--- /dev/null
+++ b/tools/sbom/generate-sbom.py
@@ -0,0 +1,540 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generate the SBOM of the current target product in SPDX format.
+Usage example:
+ generate-sbom.py --output_file out/target/product/vsoc_x86_64/sbom.spdx \
+ --metadata out/target/product/vsoc_x86_64/sbom-metadata.csv \
+ --product_out_dir=out/target/product/vsoc_x86_64 \
+ --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
+ --product_mfr=Google
+"""
+
+import argparse
+import csv
+import datetime
+import google.protobuf.text_format as text_format
+import hashlib
+import os
+import metadata_file_pb2
+import sbom_data
+import sbom_writers
+
+
+# Package type
+PKG_SOURCE = 'SOURCE'
+PKG_UPSTREAM = 'UPSTREAM'
+PKG_PREBUILT = 'PREBUILT'
+
+# Security tag
+NVD_CPE23 = 'NVD-CPE2.3:'
+
+# Report
+ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
+ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
+ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
+ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
+ISSUE_INSTALLED_FILE_NOT_EXIST = 'Non-exist installed files:'
+INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
+
+
+def get_args():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
+ parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
+ parser.add_argument('--metadata', required=True, help='The SBOM metadata file path.')
+ parser.add_argument('--product_out_dir', required=True, help='The parent directory of all the installed files.')
+ parser.add_argument('--build_version', required=True, help='The build version.')
+ parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
+ parser.add_argument('--json', action='store_true', default=False, help='Generated SBOM file in SPDX JSON format')
+ parser.add_argument('--unbundled', action='store_true', default=False, help='Generate SBOM file for unbundled module')
+
+ return parser.parse_args()
+
+
+def log(*info):
+ if args.verbose:
+ for i in info:
+ print(i)
+
+
+def encode_for_spdxid(s):
+ """Simple encode for string values used in SPDXID which uses the charset of A-Za-Z0-9.-"""
+ result = ''
+ for c in s:
+ if c.isalnum() or c in '.-':
+ result += c
+ elif c in '_@/':
+ result += '-'
+ else:
+ result += '0x' + c.encode('utf-8').hex()
+
+ return result.lstrip('-')
+
+
+def new_package_id(package_name, type):
+ return f'SPDXRef-{type}-{encode_for_spdxid(package_name)}'
+
+
+def new_file_id(file_path):
+ return f'SPDXRef-{encode_for_spdxid(file_path)}'
+
+
+def checksum(file_path):
+ file_path = args.product_out_dir + '/' + file_path
+ h = hashlib.sha1()
+ if os.path.islink(file_path):
+ h.update(os.readlink(file_path).encode('utf-8'))
+ else:
+ with open(file_path, 'rb') as f:
+ h.update(f.read())
+ return f'SHA1: {h.hexdigest()}'
+
+
+def is_soong_prebuilt_module(file_metadata):
+ return file_metadata['soong_module_type'] and file_metadata['soong_module_type'] in [
+ 'android_app_import', 'android_library_import', 'cc_prebuilt_binary', 'cc_prebuilt_library',
+ 'cc_prebuilt_library_headers', 'cc_prebuilt_library_shared', 'cc_prebuilt_library_static', 'cc_prebuilt_object',
+ 'dex_import', 'java_import', 'java_sdk_library_import', 'java_system_modules_import',
+ 'libclang_rt_prebuilt_library_static', 'libclang_rt_prebuilt_library_shared', 'llvm_prebuilt_library_static',
+ 'ndk_prebuilt_object', 'ndk_prebuilt_shared_stl', 'nkd_prebuilt_static_stl', 'prebuilt_apex',
+ 'prebuilt_bootclasspath_fragment', 'prebuilt_dsp', 'prebuilt_firmware', 'prebuilt_kernel_modules',
+ 'prebuilt_rfsa', 'prebuilt_root', 'rust_prebuilt_dylib', 'rust_prebuilt_library', 'rust_prebuilt_rlib',
+ 'vndk_prebuilt_shared',
+
+ # 'android_test_import',
+ # 'cc_prebuilt_test_library_shared',
+ # 'java_import_host',
+ # 'java_test_import',
+ # 'llvm_host_prebuilt_library_shared',
+ # 'prebuilt_apis',
+ # 'prebuilt_build_tool',
+ # 'prebuilt_defaults',
+ # 'prebuilt_etc',
+ # 'prebuilt_etc_host',
+ # 'prebuilt_etc_xml',
+ # 'prebuilt_font',
+ # 'prebuilt_hidl_interfaces',
+ # 'prebuilt_platform_compat_config',
+ # 'prebuilt_stubs_sources',
+ # 'prebuilt_usr_share',
+ # 'prebuilt_usr_share_host',
+ # 'soong_config_module_type_import',
+ ]
+
+
+def is_source_package(file_metadata):
+ module_path = file_metadata['module_path']
+ return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)
+
+
+def is_prebuilt_package(file_metadata):
+ module_path = file_metadata['module_path']
+ if module_path:
+ return (module_path.startswith('prebuilts/') or
+ is_soong_prebuilt_module(file_metadata) or
+ file_metadata['is_prebuilt_make_module'])
+
+ kernel_module_copy_files = file_metadata['kernel_module_copy_files']
+ if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
+ return True
+
+ return False
+
+
+def get_source_package_info(file_metadata, metadata_file_path):
+ """Return source package info exists in its METADATA file, currently including name, security tag
+ and external SBOM reference.
+
+ See go/android-spdx and go/android-sbom-gen for more details.
+ """
+ if not metadata_file_path:
+ return file_metadata['module_path'], []
+
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ external_refs = []
+ for tag in metadata_proto.third_party.security.tag:
+ if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
+ external_refs.append(
+ sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+ type=sbom_data.PackageExternalRefType.cpe23Type,
+ locator=tag.removeprefix(NVD_CPE23)))
+ elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
+ external_refs.append(
+ sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+ type=sbom_data.PackageExternalRefType.cpe22Type,
+ locator=tag.removeprefix(NVD_CPE23)))
+
+ if metadata_proto.name:
+ return metadata_proto.name, external_refs
+ else:
+ return os.path.basename(metadata_file_path), external_refs # return the directory name only as package name
+
+
+def get_prebuilt_package_name(file_metadata, metadata_file_path):
+ """Return name of a prebuilt package, which can be from the METADATA file, metadata file path,
+ module path or kernel module's source path if the installed file is a kernel module.
+
+ See go/android-spdx and go/android-sbom-gen for more details.
+ """
+ name = None
+ if metadata_file_path:
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ if metadata_proto.name:
+ name = metadata_proto.name
+ else:
+ name = metadata_file_path
+ elif file_metadata['module_path']:
+ name = file_metadata['module_path']
+ elif file_metadata['kernel_module_copy_files']:
+ src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
+ name = os.path.dirname(src_path)
+
+ return name.removeprefix('prebuilts/').replace('/', '-')
+
+
+def get_metadata_file_path(file_metadata):
+ """Search for METADATA file of a package and return its path."""
+ metadata_path = ''
+ if file_metadata['module_path']:
+ metadata_path = file_metadata['module_path']
+ elif file_metadata['kernel_module_copy_files']:
+ metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])
+
+ while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
+ metadata_path = os.path.dirname(metadata_path)
+
+ return metadata_path
+
+
+def get_package_version(metadata_file_path):
+ """Return a package's version in its METADATA file."""
+ if not metadata_file_path:
+ return None
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ return metadata_proto.third_party.version
+
+
+def get_package_homepage(metadata_file_path):
+ """Return a package's homepage URL in its METADATA file."""
+ if not metadata_file_path:
+ return None
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ if metadata_proto.third_party.homepage:
+ return metadata_proto.third_party.homepage
+ for url in metadata_proto.third_party.url:
+ if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
+ return url.value
+
+ return None
+
+
+def get_package_download_location(metadata_file_path):
+ """Return a package's code repository URL in its METADATA file."""
+ if not metadata_file_path:
+ return None
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ if metadata_proto.third_party.url:
+ urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
+ if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
+ return urls[0].value
+ elif len(urls) > 1:
+ return urls[1].value
+
+ return None
+
+
+def get_sbom_fragments(installed_file_metadata, metadata_file_path):
+ """Return SPDX fragment of source/prebuilt packages, which usually contains a SOURCE/PREBUILT
+ package, a UPSTREAM package if it's a source package and a external SBOM document reference if
+ it's a prebuilt package with sbom_ref defined in its METADATA file.
+
+ See go/android-spdx and go/android-sbom-gen for more details.
+ """
+ external_doc_ref = None
+ packages = []
+ relationships = []
+
+ # Info from METADATA file
+ homepage = get_package_homepage(metadata_file_path)
+ version = get_package_version(metadata_file_path)
+ download_location = get_package_download_location(metadata_file_path)
+
+ if is_source_package(installed_file_metadata):
+ # Source fork packages
+ name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
+ source_package_id = new_package_id(name, PKG_SOURCE)
+ source_package = sbom_data.Package(id=source_package_id, name=name, version=args.build_version,
+ download_location=sbom_data.VALUE_NONE,
+ supplier='Organization: ' + args.product_mfr,
+ external_refs=external_refs)
+
+ upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+ upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
+ supplier=('Organization: ' + homepage) if homepage else sbom_data.VALUE_NOASSERTION,
+ download_location=download_location)
+ packages += [source_package, upstream_package]
+ relationships.append(sbom_data.Relationship(id1=source_package_id,
+ relationship=sbom_data.RelationshipType.VARIANT_OF,
+ id2=upstream_package_id))
+ elif is_prebuilt_package(installed_file_metadata):
+ # Prebuilt fork packages
+ name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
+ prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
+ prebuilt_package = sbom_data.Package(id=prebuilt_package_id,
+ name=name,
+ download_location=sbom_data.VALUE_NONE,
+ version=args.build_version,
+ supplier='Organization: ' + args.product_mfr)
+ packages.append(prebuilt_package)
+
+ if metadata_file_path:
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
+ sbom_url = metadata_proto.third_party.sbom_ref.url
+ sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
+ upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
+ if sbom_url and sbom_checksum and upstream_element_id:
+ doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{encode_for_spdxid(name)}'
+ external_doc_ref = sbom_data.DocumentExternalReference(id=doc_ref_id,
+ uri=sbom_url,
+ checksum=sbom_checksum)
+ relationships.append(
+ sbom_data.Relationship(id1=prebuilt_package_id,
+ relationship=sbom_data.RelationshipType.VARIANT_OF,
+ id2=doc_ref_id + ':' + upstream_element_id))
+
+ return external_doc_ref, packages, relationships
+
+
+def generate_package_verification_code(files):
+ checksums = [file.checksum for file in files]
+ checksums.sort()
+ h = hashlib.sha1()
+ h.update(''.join(checksums).encode(encoding='utf-8'))
+ return h.hexdigest()
+
+
+def save_report(report):
+ prefix, _ = os.path.splitext(args.output_file)
+ with open(prefix + '-gen-report.txt', 'w', encoding='utf-8') as report_file:
+ for type, issues in report.items():
+ report_file.write(type + '\n')
+ for issue in issues:
+ report_file.write('\t' + issue + '\n')
+ report_file.write('\n')
+
+
+# Validate the metadata generated by Make for installed files and report if there is no metadata.
+def installed_file_has_metadata(installed_file_metadata, report):
+ installed_file = installed_file_metadata['installed_file']
+ module_path = installed_file_metadata['module_path']
+ product_copy_files = installed_file_metadata['product_copy_files']
+ kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+ is_platform_generated = installed_file_metadata['is_platform_generated']
+
+ if (not module_path and
+ not product_copy_files and
+ not kernel_module_copy_files and
+ not is_platform_generated and
+ not installed_file.endswith('.fsv_meta')):
+ report[ISSUE_NO_METADATA].append(installed_file)
+ return False
+
+ return True
+
+
+def report_metadata_file(metadata_file_path, installed_file_metadata, report):
+ if metadata_file_path:
+ report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
+ 'installed_file: {}, module_path: {}, METADATA file: {}'.format(
+ installed_file_metadata['installed_file'],
+ installed_file_metadata['module_path'],
+ metadata_file_path + '/METADATA'))
+
+ package_metadata = metadata_file_pb2.Metadata()
+ with open(metadata_file_path + '/METADATA', 'rt') as f:
+ text_format.Parse(f.read(), package_metadata)
+
+ if not metadata_file_path in metadata_file_protos:
+ metadata_file_protos[metadata_file_path] = package_metadata
+ if not package_metadata.name:
+ report[ISSUE_METADATA_FILE_INCOMPLETE].append(f'{metadata_file_path}/METADATA does not has "name"')
+
+ if not package_metadata.third_party.version:
+ report[ISSUE_METADATA_FILE_INCOMPLETE].append(
+ f'{metadata_file_path}/METADATA does not has "third_party.version"')
+
+ for tag in package_metadata.third_party.security.tag:
+ if not tag.startswith(NVD_CPE23):
+ report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
+ f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
+ else:
+ report[ISSUE_NO_METADATA_FILE].append(
+ "installed_file: {}, module_path: {}".format(
+ installed_file_metadata['installed_file'], installed_file_metadata['module_path']))
+
+
+def generate_sbom_for_unbundled():
+ with open(args.metadata, newline='') as sbom_metadata_file:
+ reader = csv.DictReader(sbom_metadata_file)
+ doc = sbom_data.Document(name=args.build_version,
+ namespace=f'https://www.google.com/sbom/spdx/android/{args.build_version}',
+ creators=['Organization: ' + args.product_mfr])
+ for installed_file_metadata in reader:
+ installed_file = installed_file_metadata['installed_file']
+ if args.output_file != args.product_out_dir + installed_file + ".spdx":
+ continue
+
+ module_path = installed_file_metadata['module_path']
+ package_id = new_package_id(module_path, PKG_PREBUILT)
+ package = sbom_data.Package(id=package_id,
+ name=module_path,
+ version=args.build_version,
+ supplier='Organization: ' + args.product_mfr)
+ file_id = new_file_id(installed_file)
+ file = sbom_data.File(id=file_id, name=installed_file, checksum=checksum(installed_file))
+ relationship = sbom_data.Relationship(id1=file_id,
+ relationship=sbom_data.RelationshipType.GENERATED_FROM,
+ id2=package_id)
+ doc.add_package(package)
+ doc.files.append(file)
+ doc.describes = file_id
+ doc.add_relationship(relationship)
+ doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+ break
+
+ with open(args.output_file, 'w', encoding="utf-8") as file:
+ sbom_writers.TagValueWriter.write(doc, file, fragment=True)
+
+
def main():
  """Generates the SPDX SBOM of a product image from the build metadata CSV.

  With --unbundled, delegates to generate_sbom_for_unbundled() (an SBOM
  fragment for a single artifact) and returns early. Otherwise walks every
  installed file listed in args.metadata, records it under the PRODUCT
  package, and links it to the package it was generated from.
  """
  global args
  args = get_args()
  log('Args:', vars(args))

  if args.unbundled:
    generate_sbom_for_unbundled()
    return

  # Cache of parsed METADATA protos, keyed by metadata file path.
  global metadata_file_protos
  metadata_file_protos = {}

  doc = sbom_data.Document(name=args.build_version,
                           namespace=f'https://www.google.com/sbom/spdx/android/{args.build_version}',
                           creators=['Organization: ' + args.product_mfr])

  # The PRODUCT package describes the whole image and lists every installed file.
  product_package = sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
                                      name=sbom_data.PACKAGE_NAME_PRODUCT,
                                      download_location=sbom_data.VALUE_NONE,
                                      version=args.build_version,
                                      supplier='Organization: ' + args.product_mfr,
                                      files_analyzed=True)
  doc.packages.append(product_package)

  # The PLATFORM package owns files built from the platform source tree.
  doc.packages.append(sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
                                        name=sbom_data.PACKAGE_NAME_PLATFORM,
                                        download_location=sbom_data.VALUE_NONE,
                                        version=args.build_version,
                                        supplier='Organization: ' + args.product_mfr))

  # Report on some issues and information.
  # NOTE(review): `report` is populated below but never printed/saved in this
  # function — confirm whether an emit step was dropped.
  report = {
      ISSUE_NO_METADATA: [],
      ISSUE_NO_METADATA_FILE: [],
      ISSUE_METADATA_FILE_INCOMPLETE: [],
      ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
      ISSUE_INSTALLED_FILE_NOT_EXIST: [],
      INFO_METADATA_FOUND_FOR_PACKAGE: [],
  }

  # Scan the metadata in CSV file and create the corresponding package and
  # file records in SPDX.
  with open(args.metadata, newline='') as sbom_metadata_file:
    reader = csv.DictReader(sbom_metadata_file)
    for installed_file_metadata in reader:
      installed_file = installed_file_metadata['installed_file']
      module_path = installed_file_metadata['module_path']
      product_copy_files = installed_file_metadata['product_copy_files']
      kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']

      if not installed_file_has_metadata(installed_file_metadata, report):
        continue
      # Skip entries whose installed file is absent from the output directory.
      file_path = args.product_out_dir + '/' + installed_file
      if not (os.path.islink(file_path) or os.path.isfile(file_path)):
        report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
        continue

      file_id = new_file_id(installed_file)
      doc.files.append(
        sbom_data.File(id=file_id, name=installed_file, checksum=checksum(installed_file)))
      product_package.file_ids.append(file_id)

      if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
        metadata_file_path = get_metadata_file_path(installed_file_metadata)
        report_metadata_file(metadata_file_path, installed_file_metadata, report)

        # File from source fork packages or prebuilt fork packages.
        external_doc_ref, pkgs, rels = get_sbom_fragments(installed_file_metadata, metadata_file_path)
        if pkgs:
          if external_doc_ref:
            doc.add_external_ref(external_doc_ref)
          for p in pkgs:
            doc.add_package(p)
          for rel in rels:
            doc.add_relationship(rel)
          fork_package_id = pkgs[0].id  # The first package should be the source/prebuilt fork package
          doc.add_relationship(sbom_data.Relationship(id1=file_id,
                                                      relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                      id2=fork_package_id))
      elif module_path or installed_file_metadata['is_platform_generated']:
        # File from PLATFORM package.
        doc.add_relationship(sbom_data.Relationship(id1=file_id,
                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                    id2=sbom_data.SPDXID_PLATFORM))
      elif product_copy_files:
        # Format of product_copy_files: <source path>:<dest path>
        # So far product_copy_files are copied from directory system, kernel,
        # hardware, frameworks and device, so process them as files from the
        # PLATFORM package.
        doc.add_relationship(sbom_data.Relationship(id1=file_id,
                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                    id2=sbom_data.SPDXID_PLATFORM))
      elif installed_file.endswith('.fsv_meta'):
        # See build/make/core/Makefile:2988
        doc.add_relationship(sbom_data.Relationship(id1=file_id,
                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                    id2=sbom_data.SPDXID_PLATFORM))
      elif kernel_module_copy_files.startswith('ANDROID-GEN'):
        # For the four files generated for _dlkm, _ramdisk partitions
        # See build/make/core/Makefile:323
        doc.add_relationship(sbom_data.Relationship(id1=file_id,
                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                    id2=sbom_data.SPDXID_PLATFORM))

  product_package.verification_code = generate_package_verification_code(doc.files)

  # Save SBOM records to output file.
  doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
  with open(args.output_file, 'w', encoding="utf-8") as file:
    sbom_writers.TagValueWriter.write(doc, file)
  if args.json:
    with open(args.output_file+'.json', 'w', encoding="utf-8") as file:
      sbom_writers.JSONWriter.write(doc, file)
+
+
# Allow use both as a standalone tool and as an importable module.
if __name__ == '__main__':
  main()
diff --git a/tools/sbom/sbom_data.py b/tools/sbom/sbom_data.py
new file mode 100644
index 0000000..d2ef48d
--- /dev/null
+++ b/tools/sbom/sbom_data.py
@@ -0,0 +1,123 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Define data classes that model SBOMs defined by SPDX. The data classes could be
+written out to different formats (tagvalue, JSON, etc) of SPDX with corresponding
+writer utilities.
+
+Refer to SPDX 2.3 spec: https://spdx.github.io/spdx-spec/v2.3/ and go/android-spdx for details of
+fields in each data class.
+"""
+
+from dataclasses import dataclass, field
+from typing import List
+
# Well-known SPDX element identifiers shared by all generated Android SBOMs.
SPDXID_DOC = 'SPDXRef-DOCUMENT'
SPDXID_PRODUCT = 'SPDXRef-PRODUCT'
SPDXID_PLATFORM = 'SPDXRef-PLATFORM'

# Names of the two synthetic top-level packages in a product SBOM.
PACKAGE_NAME_PRODUCT = 'PRODUCT'
PACKAGE_NAME_PLATFORM = 'PLATFORM'

# Standard SPDX field values for unknown / intentionally absent data.
VALUE_NOASSERTION = 'NOASSERTION'
VALUE_NONE = 'NONE'
+
+
class PackageExternalRefCategory:
  """Valid values of the category field of a package ExternalRef (SPDX 2.3)."""
  SECURITY = 'SECURITY'
  PACKAGE_MANAGER = 'PACKAGE-MANAGER'
  PERSISTENT_ID = 'PERSISTENT-ID'
  OTHER = 'OTHER'
+
+
class PackageExternalRefType:
  """ExternalRef types used with the SECURITY category: CPE 2.2 / 2.3 locators."""
  cpe22Type = 'cpe22Type'
  cpe23Type = 'cpe23Type'
+
+
@dataclass
class PackageExternalRef:
  """An external reference of a package, e.g. a CPE locator."""
  category: PackageExternalRefCategory
  type: PackageExternalRefType
  locator: str
+
+
@dataclass
class Package:
  """An SPDX package; only name and id are required."""
  name: str
  id: str
  version: str = None
  supplier: str = None
  download_location: str = None
  files_analyzed: bool = False
  # Package verification code; only meaningful when files_analyzed is True.
  verification_code: str = None
  # SPDX ids of the files this package contains.
  file_ids: List[str] = field(default_factory=list)
  external_refs: List[PackageExternalRef] = field(default_factory=list)
+
+
@dataclass
class File:
  """A file record; checksum is stored as 'ALGORITHM: value' (e.g. 'SHA1: ...')."""
  id: str
  name: str
  checksum: str
+
+
class RelationshipType:
  """Subset of SPDX 2.3 relationship types used in Android SBOMs."""
  DESCRIBES = 'DESCRIBES'
  VARIANT_OF = 'VARIANT_OF'
  GENERATED_FROM = 'GENERATED_FROM'
+
+
@dataclass
class Relationship:
  """A relationship triple, serialized as '<id1> <relationship> <id2>'."""
  id1: str
  relationship: RelationshipType
  id2: str
+
+
@dataclass
class DocumentExternalReference:
  """A reference to an external SPDX document."""
  id: str
  uri: str
  # Stored as 'ALGORITHM: value'; the writers split on ': ' when serializing.
  checksum: str
+
+
@dataclass
class Document:
  """A complete SPDX document: creation-info header fields plus its packages,
  files and relationships. The add_* helpers de-duplicate on insert."""
  name: str
  namespace: str
  id: str = SPDXID_DOC
  describes: str = SPDXID_PRODUCT
  creators: List[str] = field(default_factory=list)
  created: str = None
  external_refs: List[DocumentExternalReference] = field(default_factory=list)
  packages: List[Package] = field(default_factory=list)
  files: List[File] = field(default_factory=list)
  relationships: List[Relationship] = field(default_factory=list)

  def add_external_ref(self, external_ref):
    # An external document reference is unique by its URI.
    for existing in self.external_refs:
      if existing.uri == external_ref.uri:
        return
    self.external_refs.append(external_ref)

  def add_package(self, package):
    # A package is unique by its SPDX id.
    if all(p.id != package.id for p in self.packages):
      self.packages.append(package)

  def add_relationship(self, rel):
    # A relationship is unique by the full (id1, relationship, id2) triple.
    for existing in self.relationships:
      if (existing.id1, existing.relationship, existing.id2) == \
          (rel.id1, rel.relationship, rel.id2):
        return
    self.relationships.append(rel)
diff --git a/tools/sbom/sbom_writers.py b/tools/sbom/sbom_writers.py
new file mode 100644
index 0000000..b1c66c5
--- /dev/null
+++ b/tools/sbom/sbom_writers.py
@@ -0,0 +1,365 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Serialize objects defined in package sbom_data to SPDX format: tagvalue, JSON.
+"""
+
+import json
+import sbom_data
+
# SPDX spec version and the mandatory data license of SPDX documents.
SPDX_VER = 'SPDX-2.3'
DATA_LIC = 'CC0-1.0'
+
+
class Tags:
  """Tag names of the SPDX tagvalue format."""
  # Common
  SPDXID = 'SPDXID'
  SPDX_VERSION = 'SPDXVersion'
  DATA_LICENSE = 'DataLicense'
  DOCUMENT_NAME = 'DocumentName'
  DOCUMENT_NAMESPACE = 'DocumentNamespace'
  CREATED = 'Created'
  CREATOR = 'Creator'
  EXTERNAL_DOCUMENT_REF = 'ExternalDocumentRef'

  # Package
  PACKAGE_NAME = 'PackageName'
  PACKAGE_DOWNLOAD_LOCATION = 'PackageDownloadLocation'
  PACKAGE_VERSION = 'PackageVersion'
  PACKAGE_SUPPLIER = 'PackageSupplier'
  FILES_ANALYZED = 'FilesAnalyzed'
  PACKAGE_VERIFICATION_CODE = 'PackageVerificationCode'
  PACKAGE_EXTERNAL_REF = 'ExternalRef'
  # Package license
  PACKAGE_LICENSE_CONCLUDED = 'PackageLicenseConcluded'
  PACKAGE_LICENSE_INFO_FROM_FILES = 'PackageLicenseInfoFromFiles'
  PACKAGE_LICENSE_DECLARED = 'PackageLicenseDeclared'
  PACKAGE_LICENSE_COMMENTS = 'PackageLicenseComments'

  # File
  FILE_NAME = 'FileName'
  FILE_CHECKSUM = 'FileChecksum'
  # File license
  FILE_LICENSE_CONCLUDED = 'LicenseConcluded'
  FILE_LICENSE_INFO_IN_FILE = 'LicenseInfoInFile'
  FILE_LICENSE_COMMENTS = 'LicenseComments'
  FILE_COPYRIGHT_TEXT = 'FileCopyrightText'
  FILE_NOTICE = 'FileNotice'
  FILE_ATTRIBUTION_TEXT = 'FileAttributionText'

  # Relationship
  RELATIONSHIP = 'Relationship'
+
+
class TagValueWriter:
  """Serializes a sbom_data.Document to the SPDX tagvalue format.

  Each marshal_* method returns a list of lines; write() concatenates them and
  joins with newlines, so a '' entry produces a blank separator line.
  """
  @staticmethod
  def marshal_doc_headers(sbom_doc):
    """Returns the document creation-info header lines."""
    headers = [
      f'{Tags.SPDX_VERSION}: {SPDX_VER}',
      f'{Tags.DATA_LICENSE}: {DATA_LIC}',
      f'{Tags.SPDXID}: {sbom_doc.id}',
      f'{Tags.DOCUMENT_NAME}: {sbom_doc.name}',
      f'{Tags.DOCUMENT_NAMESPACE}: {sbom_doc.namespace}',
    ]
    for creator in sbom_doc.creators:
      headers.append(f'{Tags.CREATOR}: {creator}')
    headers.append(f'{Tags.CREATED}: {sbom_doc.created}')
    for doc_ref in sbom_doc.external_refs:
      headers.append(
        f'{Tags.EXTERNAL_DOCUMENT_REF}: {doc_ref.id} {doc_ref.uri} {doc_ref.checksum}')
    headers.append('')
    return headers

  @staticmethod
  def marshal_package(package):
    """Returns the tagvalue lines of one package, ending with a blank line."""
    # PackageDownloadLocation is mandatory in SPDX; default to NOASSERTION.
    download_location = sbom_data.VALUE_NOASSERTION
    if package.download_location:
      download_location = package.download_location
    tagvalues = [
      f'{Tags.PACKAGE_NAME}: {package.name}',
      f'{Tags.SPDXID}: {package.id}',
      f'{Tags.PACKAGE_DOWNLOAD_LOCATION}: {download_location}',
      f'{Tags.FILES_ANALYZED}: {str(package.files_analyzed).lower()}',
    ]
    if package.version:
      tagvalues.append(f'{Tags.PACKAGE_VERSION}: {package.version}')
    if package.supplier:
      tagvalues.append(f'{Tags.PACKAGE_SUPPLIER}: {package.supplier}')
    if package.verification_code:
      tagvalues.append(f'{Tags.PACKAGE_VERIFICATION_CODE}: {package.verification_code}')
    if package.external_refs:
      for external_ref in package.external_refs:
        tagvalues.append(
          f'{Tags.PACKAGE_EXTERNAL_REF}: {external_ref.category} {external_ref.type} {external_ref.locator}')

    tagvalues.append('')
    return tagvalues

  @staticmethod
  def marshal_described_element(sbom_doc):
    """Marshals the element named by DocumentDescribes together with the
    DESCRIBES relationship; returns None when it cannot be found."""
    if not sbom_doc.describes:
      return None

    product_package = [p for p in sbom_doc.packages if p.id == sbom_doc.describes]
    if product_package:
      tagvalues = TagValueWriter.marshal_package(product_package[0])
      tagvalues.append(
        f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')

      tagvalues.append('')
      return tagvalues

    file = [f for f in sbom_doc.files if f.id == sbom_doc.describes]
    if file:
      # Described element is a file; the file record itself is emitted by
      # marshal_files, so only the DESCRIBES relationship is needed here.
      tagvalues = [
        f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}'
      ]

      return tagvalues

    return None

  @staticmethod
  def marshal_packages(sbom_doc):
    """Marshals all packages except the described one.

    A SOURCE package immediately followed by its UPSTREAM package is emitted as
    a pair together with their VARIANT_OF relationship; those relationships are
    returned so marshal_relationships can skip re-emitting them.
    """
    tagvalues = []
    marshaled_relationships = []
    i = 0
    packages = sbom_doc.packages
    while i < len(packages):
      if packages[i].id == sbom_doc.describes:
        # The described package is already written by marshal_described_element.
        i += 1
        continue

      if i + 1 < len(packages) \
          and packages[i].id.startswith('SPDXRef-SOURCE-') \
          and packages[i + 1].id.startswith('SPDXRef-UPSTREAM-'):
        tagvalues += TagValueWriter.marshal_package(packages[i])
        tagvalues += TagValueWriter.marshal_package(packages[i + 1])
        rel = next((r for r in sbom_doc.relationships if
                    r.id1 == packages[i].id and
                    r.id2 == packages[i + 1].id and
                    r.relationship == sbom_data.RelationshipType.VARIANT_OF), None)
        if rel:
          marshaled_relationships.append(rel)
          tagvalues.append(TagValueWriter.marshal_relationship(rel))
          tagvalues.append('')

        i += 2
      else:
        tagvalues += TagValueWriter.marshal_package(packages[i])
        i += 1

    return tagvalues, marshaled_relationships

  @staticmethod
  def marshal_file(file):
    """Returns the tagvalue lines of one file, ending with a blank line."""
    tagvalues = [
      f'{Tags.FILE_NAME}: {file.name}',
      f'{Tags.SPDXID}: {file.id}',
      f'{Tags.FILE_CHECKSUM}: {file.checksum}',
      '',
    ]

    return tagvalues

  @staticmethod
  def marshal_files(sbom_doc):
    tagvalues = []
    for file in sbom_doc.files:
      tagvalues += TagValueWriter.marshal_file(file)
    return tagvalues

  @staticmethod
  def marshal_relationship(rel):
    return f'{Tags.RELATIONSHIP}: {rel.id1} {rel.relationship} {rel.id2}'

  @staticmethod
  def marshal_relationships(sbom_doc, marshaled_rels):
    """Marshals all relationships not already emitted by marshal_packages,
    sorted for deterministic output."""
    tagvalues = []
    sorted_rels = sorted(sbom_doc.relationships, key=lambda r: r.id2 + r.id1)
    for rel in sorted_rels:
      if any(r.id1 == rel.id1 and r.id2 == rel.id2 and r.relationship == rel.relationship
             for r in marshaled_rels):
        continue
      tagvalues.append(TagValueWriter.marshal_relationship(rel))
    tagvalues.append('')
    return tagvalues

  @staticmethod
  def write(sbom_doc, file, fragment=False):
    """Writes the document to file. With fragment=True the document headers
    and the DESCRIBES element are omitted (per the fragment golden testdata)."""
    content = []
    if not fragment:
      content += TagValueWriter.marshal_doc_headers(sbom_doc)
      described_element = TagValueWriter.marshal_described_element(sbom_doc)
      if described_element:
        content += described_element
    content += TagValueWriter.marshal_files(sbom_doc)
    tagvalues, marshaled_relationships = TagValueWriter.marshal_packages(sbom_doc)
    content += tagvalues
    content += TagValueWriter.marshal_relationships(sbom_doc, marshaled_relationships)
    file.write('\n'.join(content))
+
+
class PropNames:
  """Property names of the SPDX JSON format."""
  # Common
  SPDXID = 'SPDXID'
  SPDX_VERSION = 'spdxVersion'
  DATA_LICENSE = 'dataLicense'
  NAME = 'name'
  DOCUMENT_NAMESPACE = 'documentNamespace'
  CREATION_INFO = 'creationInfo'
  CREATORS = 'creators'
  CREATED = 'created'
  EXTERNAL_DOCUMENT_REF = 'externalDocumentRefs'
  DOCUMENT_DESCRIBES = 'documentDescribes'
  EXTERNAL_DOCUMENT_ID = 'externalDocumentId'
  EXTERNAL_DOCUMENT_URI = 'spdxDocument'
  EXTERNAL_DOCUMENT_CHECKSUM = 'checksum'
  ALGORITHM = 'algorithm'
  CHECKSUM_VALUE = 'checksumValue'

  # Package
  PACKAGES = 'packages'
  PACKAGE_DOWNLOAD_LOCATION = 'downloadLocation'
  PACKAGE_VERSION = 'versionInfo'
  PACKAGE_SUPPLIER = 'supplier'
  FILES_ANALYZED = 'filesAnalyzed'
  PACKAGE_VERIFICATION_CODE = 'packageVerificationCode'
  PACKAGE_VERIFICATION_CODE_VALUE = 'packageVerificationCodeValue'
  PACKAGE_EXTERNAL_REFS = 'externalRefs'
  PACKAGE_EXTERNAL_REF_CATEGORY = 'referenceCategory'
  PACKAGE_EXTERNAL_REF_TYPE = 'referenceType'
  PACKAGE_EXTERNAL_REF_LOCATOR = 'referenceLocator'
  PACKAGE_HAS_FILES = 'hasFiles'

  # File
  FILES = 'files'
  FILE_NAME = 'fileName'
  FILE_CHECKSUMS = 'checksums'

  # Relationship
  RELATIONSHIPS = 'relationships'
  REL_ELEMENT_ID = 'spdxElementId'
  REL_RELATED_ELEMENT_ID = 'relatedSpdxElement'
  REL_TYPE = 'relationshipType'
+
+
class JSONWriter:
  """Serializes a sbom_data.Document to SPDX JSON.

  Dict insertion order is preserved by json.dumps, so the key order written
  here is the key order in the output (and in the golden testdata).
  """
  @staticmethod
  def marshal_doc_headers(sbom_doc):
    """Returns the document creation-info properties as a dict."""
    headers = {
      PropNames.SPDX_VERSION: SPDX_VER,
      PropNames.DATA_LICENSE: DATA_LIC,
      PropNames.SPDXID: sbom_doc.id,
      PropNames.NAME: sbom_doc.name,
      PropNames.DOCUMENT_NAMESPACE: sbom_doc.namespace,
      PropNames.CREATION_INFO: {}
    }
    creators = [creator for creator in sbom_doc.creators]
    headers[PropNames.CREATION_INFO][PropNames.CREATORS] = creators
    headers[PropNames.CREATION_INFO][PropNames.CREATED] = sbom_doc.created
    external_refs = []
    for doc_ref in sbom_doc.external_refs:
      # checksum is stored as 'ALGORITHM: value'; split into separate fields.
      checksum = doc_ref.checksum.split(': ')
      external_refs.append({
        PropNames.EXTERNAL_DOCUMENT_ID: f'{doc_ref.id}',
        PropNames.EXTERNAL_DOCUMENT_URI: doc_ref.uri,
        PropNames.EXTERNAL_DOCUMENT_CHECKSUM: {
          PropNames.ALGORITHM: checksum[0],
          PropNames.CHECKSUM_VALUE: checksum[1]
        }
      })
    if external_refs:
      headers[PropNames.EXTERNAL_DOCUMENT_REF] = external_refs
    headers[PropNames.DOCUMENT_DESCRIBES] = [sbom_doc.describes]

    return headers

  @staticmethod
  def marshal_packages(sbom_doc):
    """Returns {'packages': [...]}; optional fields are omitted when unset."""
    packages = []
    for p in sbom_doc.packages:
      package = {
        PropNames.NAME: p.name,
        PropNames.SPDXID: p.id,
        PropNames.PACKAGE_DOWNLOAD_LOCATION: p.download_location if p.download_location else sbom_data.VALUE_NOASSERTION,
        PropNames.FILES_ANALYZED: p.files_analyzed
      }
      if p.version:
        package[PropNames.PACKAGE_VERSION] = p.version
      if p.supplier:
        package[PropNames.PACKAGE_SUPPLIER] = p.supplier
      if p.verification_code:
        package[PropNames.PACKAGE_VERIFICATION_CODE] = {
          PropNames.PACKAGE_VERIFICATION_CODE_VALUE: p.verification_code
        }
      if p.external_refs:
        package[PropNames.PACKAGE_EXTERNAL_REFS] = []
        for ref in p.external_refs:
          ext_ref = {
            PropNames.PACKAGE_EXTERNAL_REF_CATEGORY: ref.category,
            PropNames.PACKAGE_EXTERNAL_REF_TYPE: ref.type,
            PropNames.PACKAGE_EXTERNAL_REF_LOCATOR: ref.locator,
          }
          package[PropNames.PACKAGE_EXTERNAL_REFS].append(ext_ref)
      if p.file_ids:
        package[PropNames.PACKAGE_HAS_FILES] = []
        for file_id in p.file_ids:
          package[PropNames.PACKAGE_HAS_FILES].append(file_id)

      packages.append(package)

    return {PropNames.PACKAGES: packages}

  @staticmethod
  def marshal_files(sbom_doc):
    """Returns {'files': [...]}."""
    files = []
    for f in sbom_doc.files:
      file = {
        PropNames.FILE_NAME: f.name,
        PropNames.SPDXID: f.id
      }
      # checksum is stored as 'ALGORITHM: value'; split into separate fields.
      checksum = f.checksum.split(': ')
      file[PropNames.FILE_CHECKSUMS] = [{
        PropNames.ALGORITHM: checksum[0],
        PropNames.CHECKSUM_VALUE: checksum[1],
      }]
      files.append(file)
    return {PropNames.FILES: files}

  @staticmethod
  def marshal_relationships(sbom_doc):
    """Returns {'relationships': [...]}, sorted for deterministic output."""
    relationships = []
    sorted_rels = sorted(sbom_doc.relationships, key=lambda r: r.relationship + r.id2 + r.id1)
    for r in sorted_rels:
      rel = {
        PropNames.REL_ELEMENT_ID: r.id1,
        PropNames.REL_RELATED_ELEMENT_ID: r.id2,
        PropNames.REL_TYPE: r.relationship,
      }
      relationships.append(rel)

    return {PropNames.RELATIONSHIPS: relationships}

  @staticmethod
  def write(sbom_doc, file):
    """Writes the whole document to file as pretty-printed JSON."""
    doc = {}
    doc.update(JSONWriter.marshal_doc_headers(sbom_doc))
    doc.update(JSONWriter.marshal_packages(sbom_doc))
    doc.update(JSONWriter.marshal_files(sbom_doc))
    doc.update(JSONWriter.marshal_relationships(sbom_doc))
    file.write(json.dumps(doc, indent=4))
diff --git a/tools/sbom/sbom_writers_test.py b/tools/sbom/sbom_writers_test.py
new file mode 100644
index 0000000..361dae6
--- /dev/null
+++ b/tools/sbom/sbom_writers_test.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import io
+import pathlib
+import unittest
+import sbom_data
+import sbom_writers
+
# Shared fixture values used to build the test SBOM documents below; they must
# stay in sync with the golden files under testdata/.
BUILD_FINGER_PRINT = 'build_finger_print'
SUPPLIER_GOOGLE = 'Organization: Google'
SUPPLIER_UPSTREAM = 'Organization: upstream'

SPDXID_PREBUILT_PACKAGE1 = 'SPDXRef-PREBUILT-package1'
SPDXID_SOURCE_PACKAGE1 = 'SPDXRef-SOURCE-package1'
SPDXID_UPSTREAM_PACKAGE1 = 'SPDXRef-UPSTREAM-package1'

SPDXID_FILE1 = 'SPDXRef-file1'
SPDXID_FILE2 = 'SPDXRef-file2'
SPDXID_FILE3 = 'SPDXRef-file3'
+
+
class SBOMWritersTest(unittest.TestCase):
  """Compares writer output against the golden files in testdata/."""

  def setUp(self):
    # SBOM of a product
    self.sbom_doc = sbom_data.Document(name='test doc',
                                       namespace='http://www.google.com/sbom/spdx/android',
                                       creators=[SUPPLIER_GOOGLE],
                                       created='2023-03-31T22:17:58Z',
                                       describes=sbom_data.SPDXID_PRODUCT)
    self.sbom_doc.add_external_ref(
      sbom_data.DocumentExternalReference(id='DocumentRef-external_doc_ref',
                                          uri='external_doc_uri',
                                          checksum='SHA1: 1234567890'))
    self.sbom_doc.add_package(
      sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
                        name=sbom_data.PACKAGE_NAME_PRODUCT,
                        download_location=sbom_data.VALUE_NONE,
                        supplier=SUPPLIER_GOOGLE,
                        version=BUILD_FINGER_PRINT,
                        files_analyzed=True,
                        verification_code='123456',
                        file_ids=[SPDXID_FILE1, SPDXID_FILE2, SPDXID_FILE3]))

    self.sbom_doc.add_package(
      sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
                        name=sbom_data.PACKAGE_NAME_PLATFORM,
                        download_location=sbom_data.VALUE_NONE,
                        supplier=SUPPLIER_GOOGLE,
                        version=BUILD_FINGER_PRINT,
                        ))

    self.sbom_doc.add_package(
      sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE1,
                        name='Prebuilt package1',
                        download_location=sbom_data.VALUE_NONE,
                        supplier=SUPPLIER_GOOGLE,
                        version=BUILD_FINGER_PRINT,
                        ))

    # SOURCE/UPSTREAM pair: added in order so TagValueWriter.marshal_packages
    # pairs them with their VARIANT_OF relationship.
    self.sbom_doc.add_package(
      sbom_data.Package(id=SPDXID_SOURCE_PACKAGE1,
                        name='Source package1',
                        download_location=sbom_data.VALUE_NONE,
                        supplier=SUPPLIER_GOOGLE,
                        version=BUILD_FINGER_PRINT,
                        external_refs=[sbom_data.PackageExternalRef(
                          category=sbom_data.PackageExternalRefCategory.SECURITY,
                          type=sbom_data.PackageExternalRefType.cpe22Type,
                          locator='cpe:/a:jsoncpp_project:jsoncpp:1.9.4')]
                        ))

    self.sbom_doc.add_package(
      sbom_data.Package(id=SPDXID_UPSTREAM_PACKAGE1,
                        name='Upstream package1',
                        supplier=SUPPLIER_UPSTREAM,
                        version='1.1',
                        ))

    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_SOURCE_PACKAGE1,
                                                          relationship=sbom_data.RelationshipType.VARIANT_OF,
                                                          id2=SPDXID_UPSTREAM_PACKAGE1))

    self.sbom_doc.files.append(
      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1', checksum='SHA1: 11111'))
    self.sbom_doc.files.append(
      sbom_data.File(id=SPDXID_FILE2, name='/bin/file2', checksum='SHA1: 22222'))
    self.sbom_doc.files.append(
      sbom_data.File(id=SPDXID_FILE3, name='/bin/file3', checksum='SHA1: 33333'))

    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                          id2=sbom_data.SPDXID_PLATFORM))
    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE2,
                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                          id2=SPDXID_PREBUILT_PACKAGE1))
    self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE3,
                                                          relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                          id2=SPDXID_SOURCE_PACKAGE1
                                                          ))

    # SBOM fragment of an APK
    self.unbundled_sbom_doc = sbom_data.Document(name='test doc',
                                                 namespace='http://www.google.com/sbom/spdx/android',
                                                 creators=[SUPPLIER_GOOGLE],
                                                 created='2023-03-31T22:17:58Z',
                                                 describes=SPDXID_FILE1)

    self.unbundled_sbom_doc.files.append(
      sbom_data.File(id=SPDXID_FILE1, name='/bin/file1.apk', checksum='SHA1: 11111'))
    self.unbundled_sbom_doc.add_package(
      sbom_data.Package(id=SPDXID_SOURCE_PACKAGE1,
                        name='Unbundled apk package',
                        download_location=sbom_data.VALUE_NONE,
                        supplier=SUPPLIER_GOOGLE,
                        version=BUILD_FINGER_PRINT))
    self.unbundled_sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
                                                                    relationship=sbom_data.RelationshipType.GENERATED_FROM,
                                                                    id2=SPDXID_SOURCE_PACKAGE1))

  def test_tagvalue_writer(self):
    # Full document: headers + described element + files + packages + relationships.
    with io.StringIO() as output:
      sbom_writers.TagValueWriter.write(self.sbom_doc, output)
      expected_output = pathlib.Path('testdata/expected_tagvalue_sbom.spdx').read_text()
      self.maxDiff = None
      self.assertEqual(expected_output, output.getvalue())

  def test_tagvalue_writer_unbundled(self):
    # Fragment: no document headers, no DESCRIBES element.
    with io.StringIO() as output:
      sbom_writers.TagValueWriter.write(self.unbundled_sbom_doc, output, fragment=True)
      expected_output = pathlib.Path('testdata/expected_tagvalue_sbom_unbundled.spdx').read_text()
      self.maxDiff = None
      self.assertEqual(expected_output, output.getvalue())

  def test_json_writer(self):
    with io.StringIO() as output:
      sbom_writers.JSONWriter.write(self.sbom_doc, output)
      expected_output = pathlib.Path('testdata/expected_json_sbom.spdx.json').read_text()
      self.maxDiff = None
      self.assertEqual(expected_output, output.getvalue())
+
+
# Run the tests directly (verbose) when executed as a script.
if __name__ == '__main__':
  unittest.main(verbosity=2)
diff --git a/tools/sbom/testdata/expected_json_sbom.spdx.json b/tools/sbom/testdata/expected_json_sbom.spdx.json
new file mode 100644
index 0000000..32715a5
--- /dev/null
+++ b/tools/sbom/testdata/expected_json_sbom.spdx.json
@@ -0,0 +1,137 @@
+{
+ "spdxVersion": "SPDX-2.3",
+ "dataLicense": "CC0-1.0",
+ "SPDXID": "SPDXRef-DOCUMENT",
+ "name": "test doc",
+ "documentNamespace": "http://www.google.com/sbom/spdx/android",
+ "creationInfo": {
+ "creators": [
+ "Organization: Google"
+ ],
+ "created": "2023-03-31T22:17:58Z"
+ },
+ "externalDocumentRefs": [
+ {
+ "externalDocumentId": "DocumentRef-external_doc_ref",
+ "spdxDocument": "external_doc_uri",
+ "checksum": {
+ "algorithm": "SHA1",
+ "checksumValue": "1234567890"
+ }
+ }
+ ],
+ "documentDescribes": [
+ "SPDXRef-PRODUCT"
+ ],
+ "packages": [
+ {
+ "name": "PRODUCT",
+ "SPDXID": "SPDXRef-PRODUCT",
+ "downloadLocation": "NONE",
+ "filesAnalyzed": true,
+ "versionInfo": "build_finger_print",
+ "supplier": "Organization: Google",
+ "packageVerificationCode": {
+ "packageVerificationCodeValue": "123456"
+ },
+ "hasFiles": [
+ "SPDXRef-file1",
+ "SPDXRef-file2",
+ "SPDXRef-file3"
+ ]
+ },
+ {
+ "name": "PLATFORM",
+ "SPDXID": "SPDXRef-PLATFORM",
+ "downloadLocation": "NONE",
+ "filesAnalyzed": false,
+ "versionInfo": "build_finger_print",
+ "supplier": "Organization: Google"
+ },
+ {
+ "name": "Prebuilt package1",
+ "SPDXID": "SPDXRef-PREBUILT-package1",
+ "downloadLocation": "NONE",
+ "filesAnalyzed": false,
+ "versionInfo": "build_finger_print",
+ "supplier": "Organization: Google"
+ },
+ {
+ "name": "Source package1",
+ "SPDXID": "SPDXRef-SOURCE-package1",
+ "downloadLocation": "NONE",
+ "filesAnalyzed": false,
+ "versionInfo": "build_finger_print",
+ "supplier": "Organization: Google",
+ "externalRefs": [
+ {
+ "referenceCategory": "SECURITY",
+ "referenceType": "cpe22Type",
+ "referenceLocator": "cpe:/a:jsoncpp_project:jsoncpp:1.9.4"
+ }
+ ]
+ },
+ {
+ "name": "Upstream package1",
+ "SPDXID": "SPDXRef-UPSTREAM-package1",
+ "downloadLocation": "NOASSERTION",
+ "filesAnalyzed": false,
+ "versionInfo": "1.1",
+ "supplier": "Organization: upstream"
+ }
+ ],
+ "files": [
+ {
+ "fileName": "/bin/file1",
+ "SPDXID": "SPDXRef-file1",
+ "checksums": [
+ {
+ "algorithm": "SHA1",
+ "checksumValue": "11111"
+ }
+ ]
+ },
+ {
+ "fileName": "/bin/file2",
+ "SPDXID": "SPDXRef-file2",
+ "checksums": [
+ {
+ "algorithm": "SHA1",
+ "checksumValue": "22222"
+ }
+ ]
+ },
+ {
+ "fileName": "/bin/file3",
+ "SPDXID": "SPDXRef-file3",
+ "checksums": [
+ {
+ "algorithm": "SHA1",
+ "checksumValue": "33333"
+ }
+ ]
+ }
+ ],
+ "relationships": [
+ {
+ "spdxElementId": "SPDXRef-file1",
+ "relatedSpdxElement": "SPDXRef-PLATFORM",
+ "relationshipType": "GENERATED_FROM"
+ },
+ {
+ "spdxElementId": "SPDXRef-file2",
+ "relatedSpdxElement": "SPDXRef-PREBUILT-package1",
+ "relationshipType": "GENERATED_FROM"
+ },
+ {
+ "spdxElementId": "SPDXRef-file3",
+ "relatedSpdxElement": "SPDXRef-SOURCE-package1",
+ "relationshipType": "GENERATED_FROM"
+ },
+ {
+ "spdxElementId": "SPDXRef-SOURCE-package1",
+ "relatedSpdxElement": "SPDXRef-UPSTREAM-package1",
+ "relationshipType": "VARIANT_OF"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom.spdx b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
new file mode 100644
index 0000000..ee39e82
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
@@ -0,0 +1,65 @@
+SPDXVersion: SPDX-2.3
+DataLicense: CC0-1.0
+SPDXID: SPDXRef-DOCUMENT
+DocumentName: test doc
+DocumentNamespace: http://www.google.com/sbom/spdx/android
+Creator: Organization: Google
+Created: 2023-03-31T22:17:58Z
+ExternalDocumentRef: DocumentRef-external_doc_ref external_doc_uri SHA1: 1234567890
+
+PackageName: PRODUCT
+SPDXID: SPDXRef-PRODUCT
+PackageDownloadLocation: NONE
+FilesAnalyzed: true
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+PackageVerificationCode: 123456
+
+Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-PRODUCT
+
+FileName: /bin/file1
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+FileName: /bin/file2
+SPDXID: SPDXRef-file2
+FileChecksum: SHA1: 22222
+
+FileName: /bin/file3
+SPDXID: SPDXRef-file3
+FileChecksum: SHA1: 33333
+
+PackageName: PLATFORM
+SPDXID: SPDXRef-PLATFORM
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Prebuilt package1
+SPDXID: SPDXRef-PREBUILT-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Source package1
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4
+
+PackageName: Upstream package1
+SPDXID: SPDXRef-UPSTREAM-package1
+PackageDownloadLocation: NOASSERTION
+FilesAnalyzed: false
+PackageVersion: 1.1
+PackageSupplier: Organization: upstream
+
+Relationship: SPDXRef-SOURCE-package1 VARIANT_OF SPDXRef-UPSTREAM-package1
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-PLATFORM
+Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
+Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
new file mode 100644
index 0000000..a00c291
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
@@ -0,0 +1,12 @@
+FileName: /bin/file1.apk
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+PackageName: Unbundled apk package
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-SOURCE-package1