Merge "Initial implementation of layoutlib SBOM generation." into main
diff --git a/Changes.md b/Changes.md
index 6c0cf70..fc15e60 100644
--- a/Changes.md
+++ b/Changes.md
@@ -1,5 +1,14 @@
# Build System Changes for Android.mk/Android.bp Writers
+## Soong genrules are now sandboxed
+
+Previously, Soong genrules could access any file in the source tree without declaring it as an
+input. This made them incorrect in incremental builds and incompatible with RBE and Bazel.
+
+Now, genrules are sandboxed so they can only access their listed srcs. Modules denylisted in
+genrule/allowlists.go are exempt from this. You can also set `BUILD_BROKEN_GENRULE_SANDBOXING`
+in board config to disable this behavior.
+
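+For example, a genrule that reads a file from the tree must now list that file in `srcs`
+(a minimal sketch; the module and file names are illustrative):
+
+```
+genrule {
+    name: "gen_version_header",
+    srcs: ["version.txt"], // must be listed, or the sandbox hides it from cmd
+    out: ["version.h"],
+    cmd: "cat $(in) > $(out)",
+}
+```
+
+To opt out temporarily, a board can set `BUILD_BROKEN_GENRULE_SANDBOXING := true` in its
+BoardConfig.mk.
+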
## Partitions are no longer affected by previous builds
Partition builds used to include everything in their staging directories, and building an
diff --git a/core/Makefile b/core/Makefile
index e666dec..a253026 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -2131,6 +2131,7 @@
$(if $(BOARD_$(_var)IMAGE_EROFS_COMPRESSOR),$(hide) echo "$(1)_erofs_compressor=$(BOARD_$(_var)IMAGE_EROFS_COMPRESSOR)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_EROFS_COMPRESS_HINTS),$(hide) echo "$(1)_erofs_compress_hints=$(BOARD_$(_var)IMAGE_EROFS_COMPRESS_HINTS)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_EROFS_PCLUSTER_SIZE),$(hide) echo "$(1)_erofs_pcluster_size=$(BOARD_$(_var)IMAGE_EROFS_PCLUSTER_SIZE)" >> $(2))
+$(if $(BOARD_$(_var)IMAGE_EROFS_BLOCKSIZE),$(hide) echo "$(1)_erofs_blocksize=$(BOARD_$(_var)IMAGE_EROFS_BLOCKSIZE)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT),$(hide) echo "$(1)_extfs_inode_count=$(BOARD_$(_var)IMAGE_EXTFS_INODE_COUNT)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT),$(hide) echo "$(1)_extfs_rsv_pct=$(BOARD_$(_var)IMAGE_EXTFS_RSV_PCT)" >> $(2))
$(if $(BOARD_$(_var)IMAGE_F2FS_SLOAD_COMPRESS_FLAGS),$(hide) echo "$(1)_f2fs_sldc_flags=$(BOARD_$(_var)IMAGE_F2FS_SLOAD_COMPRESS_FLAGS)" >> $(2))
@@ -2220,6 +2221,7 @@
$(if $(BOARD_EROFS_COMPRESSOR),$(hide) echo "erofs_default_compressor=$(BOARD_EROFS_COMPRESSOR)" >> $(1))
$(if $(BOARD_EROFS_COMPRESS_HINTS),$(hide) echo "erofs_default_compress_hints=$(BOARD_EROFS_COMPRESS_HINTS)" >> $(1))
$(if $(BOARD_EROFS_PCLUSTER_SIZE),$(hide) echo "erofs_pcluster_size=$(BOARD_EROFS_PCLUSTER_SIZE)" >> $(1))
+$(if $(BOARD_EROFS_BLOCKSIZE),$(hide) echo "erofs_blocksize=$(BOARD_EROFS_BLOCKSIZE)" >> $(1))
$(if $(BOARD_EROFS_SHARE_DUP_BLOCKS),$(hide) echo "erofs_share_dup_blocks=$(BOARD_EROFS_SHARE_DUP_BLOCKS)" >> $(1))
$(if $(BOARD_EROFS_USE_LEGACY_COMPRESSION),$(hide) echo "erofs_use_legacy_compression=$(BOARD_EROFS_USE_LEGACY_COMPRESSION)" >> $(1))
$(if $(BOARD_EXT4_SHARE_DUP_BLOCKS),$(hide) echo "ext4_share_dup_blocks=$(BOARD_EXT4_SHARE_DUP_BLOCKS)" >> $(1))
diff --git a/core/board_config.mk b/core/board_config.mk
index eb4c5ec..b7ca3a4 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -188,6 +188,7 @@
BUILD_BROKEN_VENDOR_PROPERTY_NAMESPACE \
BUILD_BROKEN_VINTF_PRODUCT_COPY_FILES \
BUILD_BROKEN_INCORRECT_PARTITION_IMAGES \
+ BUILD_BROKEN_GENRULE_SANDBOXING \
_build_broken_var_list += \
$(foreach m,$(AVAILABLE_BUILD_MODULE_TYPES) \
@@ -223,6 +224,8 @@
board_config_mk := \
$(strip $(sort $(wildcard \
$(SRC_TARGET_DIR)/board/$(TARGET_DEVICE)/BoardConfig.mk \
+ device/generic/goldfish/board/$(TARGET_DEVICE)/BoardConfig.mk \
+ device/google/cuttlefish/board/$(TARGET_DEVICE)/BoardConfig.mk \
$(shell test -d device && find -L device -maxdepth 4 -path '*/$(TARGET_DEVICE)/BoardConfig.mk') \
$(shell test -d vendor && find -L vendor -maxdepth 4 -path '*/$(TARGET_DEVICE)/BoardConfig.mk') \
)))
diff --git a/core/soong_config.mk b/core/soong_config.mk
index f68c741..be6a795 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -16,6 +16,10 @@
# PRODUCT_AFDO_PROFILES takes precedence over product-agnostic profiles in AFDO_PROFILES
ALL_AFDO_PROFILES := $(PRODUCT_AFDO_PROFILES) $(AFDO_PROFILES)
+ifneq (,$(filter-out environment undefined,$(origin GENRULE_SANDBOXING)))
+  $(error GENRULE_SANDBOXING can only be provided via an environment variable; use BUILD_BROKEN_GENRULE_SANDBOXING in board config to disable genrule sandboxing)
+endif
+
ifeq ($(WRITE_SOONG_VARIABLES),true)
# Create soong.variables with copies of makefile settings. Runs every build,
@@ -281,7 +285,8 @@
$(call add_json_bool, BuildBrokenClangProperty, $(filter true,$(BUILD_BROKEN_CLANG_PROPERTY)))
$(call add_json_bool, BuildBrokenClangAsFlags, $(filter true,$(BUILD_BROKEN_CLANG_ASFLAGS)))
$(call add_json_bool, BuildBrokenClangCFlags, $(filter true,$(BUILD_BROKEN_CLANG_CFLAGS)))
-$(call add_json_bool, GenruleSandboxing, $(filter true,$(GENRULE_SANDBOXING)))
+# Use the value of GENRULE_SANDBOXING if set, otherwise use the inverse of BUILD_BROKEN_GENRULE_SANDBOXING
+$(call add_json_bool, GenruleSandboxing, $(if $(GENRULE_SANDBOXING),$(filter true,$(GENRULE_SANDBOXING)),$(if $(filter true,$(BUILD_BROKEN_GENRULE_SANDBOXING)),,true)))
$(call add_json_bool, BuildBrokenEnforceSyspropOwner, $(filter true,$(BUILD_BROKEN_ENFORCE_SYSPROP_OWNER)))
$(call add_json_bool, BuildBrokenTrebleSyspropNeverallow, $(filter true,$(BUILD_BROKEN_TREBLE_SYSPROP_NEVERALLOW)))
$(call add_json_bool, BuildBrokenUsesSoongPython2Modules, $(filter true,$(BUILD_BROKEN_USES_SOONG_PYTHON2_MODULES)))
@@ -381,6 +386,8 @@
$(call add_json_bool, CopyImagesForTargetFilesZip, $(filter true,$(COPY_IMAGES_FOR_TARGET_FILES_ZIP)))
$(call add_json_bool, BoardAvbEnable, $(filter true,$(BOARD_AVB_ENABLE)))
+
+ $(call add_json_list, ProductPackages, $(sort $(PRODUCT_PACKAGES)))
$(call end_json_map)
$(call add_json_bool, NextReleaseHideFlaggedApi, $(filter true,$(PRODUCT_NEXT_RELEASE_HIDE_FLAGGED_API)))
diff --git a/core/sysprop.mk b/core/sysprop.mk
index a37fd05..4e8e976 100644
--- a/core/sysprop.mk
+++ b/core/sysprop.mk
@@ -46,7 +46,6 @@
echo "ro.product.$(1).manufacturer=$(PRODUCT_MANUFACTURER)" >> $(2);\
echo "ro.product.$(1).model=$(PRODUCT_MODEL)" >> $(2);\
echo "ro.product.$(1).name=$(TARGET_PRODUCT)" >> $(2);\
- # Attestation specific properties for AOSP/GSI build running on device.
if [ -n "$(strip $(PRODUCT_MODEL_FOR_ATTESTATION))" ]; then \
echo "ro.product.model_for_attestation=$(PRODUCT_MODEL_FOR_ATTESTATION)" >> $(2);\
fi; \
diff --git a/envsetup.sh b/envsetup.sh
index 9d27c9d..af6695f 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -2054,6 +2054,11 @@
"$ANDROID_SOONG_HOST_OUT"/bin/avbtool $@
}
+function overrideflags() {
+ local T="$(gettop)"
+ (\cd "${T}" && build/make/tools/overrideflags.sh "$@")
+}
+
validate_current_shell
set_global_paths
source_vendorsetup
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 4a30574..eac3026 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -53,6 +53,7 @@
com.android.btservices \
com.android.configinfrastructure \
com.android.conscrypt \
+ com.android.crashrecovery \
com.android.devicelock \
com.android.extservices \
com.android.healthfitness \
@@ -94,6 +95,7 @@
flags_health_check \
framework-graphics \
framework-minus-apex \
+ framework-minus-apex-install-dependencies \
framework-res \
framework-sysconfig.xml \
fsck.erofs \
diff --git a/target/product/default_art_config.mk b/target/product/default_art_config.mk
index 04e9748..f7c92aa 100644
--- a/target/product/default_art_config.mk
+++ b/target/product/default_art_config.mk
@@ -65,6 +65,7 @@
com.android.btservices:framework-bluetooth \
com.android.configinfrastructure:framework-configinfrastructure \
com.android.conscrypt:conscrypt \
+ com.android.crashrecovery:framework-crashrecovery \
com.android.devicelock:framework-devicelock \
com.android.healthfitness:framework-healthfitness \
com.android.i18n:core-icu4j \
@@ -93,6 +94,7 @@
com.android.appsearch:service-appsearch \
com.android.art:service-art \
com.android.configinfrastructure:service-configinfrastructure \
+ com.android.crashrecovery:service-crashrecovery \
com.android.healthfitness:service-healthfitness \
com.android.media:service-media-s \
com.android.ondevicepersonalization:service-ondevicepersonalization \
diff --git a/target/product/fullmte.mk b/target/product/fullmte.mk
index d47c685..5726c06 100644
--- a/target/product/fullmte.mk
+++ b/target/product/fullmte.mk
@@ -20,7 +20,8 @@
# For more details, see:
# https://source.android.com/docs/security/test/memory-safety/arm-mte
ifeq ($(filter memtag_heap,$(SANITIZE_TARGET)),)
- SANITIZE_TARGET := $(strip $(SANITIZE_TARGET) memtag_heap memtag_stack)
+ # TODO(b/292478827): Re-enable memtag_stack when new toolchain rolls.
+ SANITIZE_TARGET := $(strip $(SANITIZE_TARGET) memtag_heap)
SANITIZE_TARGET_DIAG := $(strip $(SANITIZE_TARGET_DIAG) memtag_heap)
endif
PRODUCT_PRODUCT_PROPERTIES += persist.arm64.memtag.default=sync
diff --git a/target/product/generic_system.mk b/target/product/generic_system.mk
index ab36eb1..6d40436 100644
--- a/target/product/generic_system.mk
+++ b/target/product/generic_system.mk
@@ -104,10 +104,6 @@
libpolicy-subsystem
-ifneq ($(KEEP_VNDK),true)
-PRODUCT_PACKAGES += llndk.libraries.txt
-endif
-
# Include all zygote init scripts. "ro.zygote" will select one of them.
PRODUCT_COPY_FILES += \
system/core/rootdir/init.zygote32.rc:system/etc/init/hw/init.zygote32.rc \
diff --git a/target/product/gsi/Android.mk b/target/product/gsi/Android.mk
index 655a666..fa3d1da 100644
--- a/target/product/gsi/Android.mk
+++ b/target/product/gsi/Android.mk
@@ -230,7 +230,9 @@
# Filter LLNDK libs moved to APEX to avoid pulling them into /system/LIB
LOCAL_REQUIRED_MODULES := \
- $(filter-out $(LLNDK_MOVED_TO_APEX_LIBRARIES),$(LLNDK_LIBRARIES))
+ $(filter-out $(LLNDK_MOVED_TO_APEX_LIBRARIES),$(LLNDK_LIBRARIES)) \
+ llndk.libraries.txt
+
include $(BUILD_PHONY_PACKAGE)
diff --git a/tools/aconfig/overrideflags/overrideflags.py b/tools/aconfig/overrideflags/overrideflags.py
new file mode 100644
index 0000000..e355c21
--- /dev/null
+++ b/tools/aconfig/overrideflags/overrideflags.py
@@ -0,0 +1,127 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Create Aconfig value building rules.
+
+This script will help to create Aconfig flag value building rules. It will
+parse necessary information in the value file to create the building rules, but
+it will not validate the value file. The validation will defer to the building
+system.
+"""
+
+import argparse
+import pathlib
+import re
+import sys
+
+
+_VALUE_LIST_TEMPLATE: str = """
+ACONFIG_VALUES_LIST_LOCAL = [{}]
+"""
+
+_ACONFIG_VALUES_TEMPLATE: str = """
+aconfig_values {{
+ name: "{}",
+ package: "{}",
+ srcs: [
+ "{}",
+ ]
+}}
+"""
+
+_ACONFIG_VALUES_NAME_SUFFIX: str = "aconfig-local-override-{}"
+
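+# As a rough illustration of the templates above (the package name is
+# hypothetical): an override file containing the line
+#   package: "com.example.foo"
+# would result in roughly the following being appended to the generated
+# Android.bp, with the srcs entry pointing at the override file itself:
+#
+#   ACONFIG_VALUES_LIST_LOCAL = [
+#       "aconfig-local-override-com.example.foo"
+#   ]
+#
+#   aconfig_values {
+#       name: "aconfig-local-override-com.example.foo",
+#       package: "com.example.foo",
+#       srcs: [
+#           "override_values.textproto",
+#       ]
+#   }
+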
+_PACKAGE_REGEX = re.compile(r"^package\:\s*\"([\w\d\.]+)\"")
+_ANDROID_BP_FILE_NAME = r"Android.bp"
+
+
+def _parse_packages(file: pathlib.Path) -> set[str]:
+ packages = set()
+ with open(file) as f:
+ for line in f:
+ line = line.strip()
+ package_match = _PACKAGE_REGEX.match(line)
+ if package_match is None:
+ continue
+ package_name = package_match.group(1)
+ packages.add(package_name)
+
+ return packages
+
+
+def _create_android_bp(packages: set[str], file_name: str) -> str:
+ android_bp = ""
+ value_list = ",\n ".join(
+ map(f'"{_ACONFIG_VALUES_NAME_SUFFIX}"'.format, packages)
+ )
+ if value_list:
+ value_list = "\n " + value_list + "\n"
+ android_bp += _VALUE_LIST_TEMPLATE.format(value_list) + "\n"
+
+ for package in packages:
+ android_bp += _ACONFIG_VALUES_TEMPLATE.format(
+ _ACONFIG_VALUES_NAME_SUFFIX.format(package), package, file_name
+ )
+ android_bp += "\n"
+
+ return android_bp
+
+
+def _write_android_bp(new_android_bp: str, out: pathlib.Path) -> None:
+ if not out.is_dir():
+ out.mkdir(parents=True, exist_ok=True)
+
+ output = out.joinpath(_ANDROID_BP_FILE_NAME)
+ with open(output, "r+", encoding="utf8") as file:
+ lines = []
+ for line in file:
+ line = line.rstrip("\n")
+ if line.startswith("ACONFIG_VALUES_LIST_LOCAL"):
+ break
+ lines.append(line)
+ # Overwrite the file with the updated contents.
+ file.seek(0)
+ file.truncate()
+ file.write("\n".join(lines))
+ file.write(new_android_bp)
+
+
+def main(args):
+ """Program entry point."""
+ args_parser = argparse.ArgumentParser()
+ args_parser.add_argument(
+ "--overrides",
+ required=True,
+ help="The path to override file.",
+ )
+ args_parser.add_argument(
+ "--out",
+ required=True,
+ help="The path to output directory.",
+ )
+
+ args = args_parser.parse_args(args)
+ file = pathlib.Path(args.overrides)
+ out = pathlib.Path(args.out)
+ if not file.is_file():
+ raise FileNotFoundError(f"File '{file}' is not found")
+
+ packages = _parse_packages(file)
+ new_android_bp = _create_android_bp(packages, file.name)
+ _write_android_bp(new_android_bp, out)
+
+
+if __name__ == "__main__":
+ main(sys.argv[1:])
diff --git a/tools/overrideflags.sh b/tools/overrideflags.sh
new file mode 100755
index 0000000..b8605dc
--- /dev/null
+++ b/tools/overrideflags.sh
@@ -0,0 +1,99 @@
+#!/bin/bash -e
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+source $(cd $(dirname $BASH_SOURCE) &> /dev/null && pwd)/../shell_utils.sh
+require_top
+
+function print_help() {
+  echo -e "overrideflags is used to set default flag values for a local build."
+  echo -e "\nOptions:"
+  echo -e "\t--release-config \tPath to the release configuration directory. Required."
+  echo -e "\t--no-edit \tIf present, skip editing the flag value file."
+ echo -e "\t-h/--help \tShow this help."
+}
+
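+# Example usage (the release config path below is illustrative); this is normally
+# invoked through the overrideflags() helper added to envsetup.sh:
+#   overrideflags --release-config vendor/acme/release_configs/trunk
+# which opens <release-config>/aconfig/override_values.textproto in $EDITOR and
+# then regenerates the aconfig override rules in <release-config>/aconfig/Android.bp.
+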
+function main() {
+ while (($# > 0)); do
+ case $1 in
+ --release-config)
+ if [[ $# -le 1 ]]; then
+ echo "--release-config requires a path"
+ return 1
+ fi
+ local release_config_dir="$2"
+ shift 2
+ ;;
+ --no-edit)
+ local no_edit="true"
+ shift 1
+ ;;
+ -h|--help)
+ print_help
+ return
+ ;;
+ *)
+ echo "$1 is unrecognized"
+ print_help
+ return 1
+ ;;
+ esac
+ done
+
+ case $(uname -s) in
+ Darwin)
+ local host_arch=darwin-x86
+ ;;
+ Linux)
+ local host_arch=linux-x86
+ ;;
+ *)
+ >&2 echo Unknown host $(uname -s)
+ return
+ ;;
+ esac
+
+ if [[ -z "${release_config_dir}" ]]; then
+    echo "Please provide the release configuration path with --release-config"
+ exit 1
+ elif [ ! -d "${release_config_dir}" ]; then
+    echo "${release_config_dir} is not a valid directory"
+ exit 1
+ fi
+ local T="$(gettop)"
+ local aconfig_dir="${T}"/build/make/tools/aconfig/
+ local overrideflag_py="${aconfig_dir}"/overrideflags/overrideflags.py
+ local overridefile="${release_config_dir}/aconfig/override_values.textproto"
+
+ # Edit override file
+ if [[ -z "${no_edit}" ]]; then
+ editor="${EDITOR:-$(which vim)}"
+
+ eval "${editor} ${overridefile}"
+ if [ $? -ne 0 ]; then
+      echo "Failed to set override values"
+ return 1
+ fi
+ fi
+
+ ${T}/prebuilts/build-tools/${host_arch}/bin/py3-cmd -u "${overrideflag_py}" \
+ --overrides "${overridefile}" \
+ --out "${release_config_dir}/aconfig"
+}
+
+
+main "$@"
diff --git a/tools/protos/metadata_file.proto b/tools/protos/metadata_file.proto
index ac1129a..47562c5 100644
--- a/tools/protos/metadata_file.proto
+++ b/tools/protos/metadata_file.proto
@@ -92,6 +92,8 @@
SBOMRef sbom_ref = 10;
}
+ // Identifiers for the package.
+ repeated Identifier identifier = 11;
}
// URL associated with a third-party package.
@@ -278,4 +280,136 @@
// https://spdx.github.io/spdx-spec/v2.3/package-information/#72-package-spdx-identifier-field or
// https://spdx.github.io/spdx-spec/v2.3/file-information/#82-file-spdx-identifier-field
optional string element_id = 3;
+}
+
+// Identifier for a third-party package.
+// See go/tp-metadata-id.
+message Identifier {
+ // The type of the identifier. Either an "ecosystem" value from
+ // https://ossf.github.io/osv-schema/#affectedpackage-field such as "Go",
+ // "npm" or "PyPI". The "value" and "version" fields follow the same rules as
+ // defined in the OSV spec.
+
+ // Or one of:
+ // - "Git": The "value" field is the URL of the upstream git repository this
+ // package is retrieved from.
+ // For example:
+ // - https://github.com/git/git
+ // - git://git.kernel.org/pub/scm/git/git
+ //
+ // Use of a git URL requires that the package "version" value must specify a
+ // specific git tag or revision. This must not be a branch name.
+ //
+ // - "SVN": The "value" field is the URL of the upstream SVN repository this
+ // package is retrieved from.
+ // For example:
+ // - http://llvm.org/svn/llvm-project/llvm/
+ //
+ // Use of an SVN URL requires that the package "version" value must specify
+ // a specific SVN tag or revision. This must not be a branch name.
+ //
+ // - "Hg": The "value" field is the URL of the upstream mercurial repository
+ // this package is retrieved from.
+ // For example:
+ // - https://mercurial-scm.org/repo/evolve
+ //
+ // Use of a mercurial URL requires that the package "version" value must
+ // specify a specific tag or revision. This must not be a branch name.
+ //
+ // - "Darcs": the "value" field is the URL of the upstream darcs repository
+ // this package is retrieved from.
+ // For example:
+ // - https://hub.darcs.net/hu.dwim/hu.dwim.util
+ //
+ // Use of a Darcs URL requires that the package "version" value must
+ // specify a specific tag or revision. This must not be a branch name.
+ //
+ // - "Piper": The "value" field is the URL of the upstream piper location.
+ // This is primarily used when a package is being migrated into third_party
+ // from elsewhere in Piper, or when a package is being newly developed in
+ // third_party.
+ //
+ // - "VCS": This is a generic fallback for an unlisted VCS system. The
+ // "value" field is the URL of the repository for this VCS.
+ //
+ // - "Archive": The "value" field is the URL of the archive containing the
+ // source code for the package, for example a zip or tgz file.
+ //
+ // - "PrebuiltByAlphabet": This type should be used for archives of primarily
+ // Google-owned source code (may contain non-Google-owned dependencies),
+ // which has been built using production Google infrastructure, and copied
+ // into third_party.
+ //
+ // - "LocalSource": The "value" field is the URL identifying where the local
+ // copy of the package source code can be found.
+ // Examples:
+ // - https://android.googlesource.com/platform/external/apache-http/
+ //
+ // Typically, the metadata files describing a package reside in the same
+ // directory as the source code for the package. In a few rare cases where
+ // they are separate, the LocalSource URL identifies where to find the
+ // source code. This only describes where to find the local copy of the
+ // source; there should always be an additional URL describing where the
+ // package was retrieved from.
+ //
+ // - "Other": An identifier that does not fit any other type. This may also
+  //   indicate that the source code was received via email or some other
+  //   out-of-band way. This is most commonly used with commercial software
+  //   received directly from the vendor. In the case of email, the "value" field
+ // can be used to provide additional information about how it was received.
+ optional string type = 1;
+
+  // A human-readable string indicating why a third-party package does not
+  // have this identifier type set.
+ // Example:
+ // identifier {
+ // type: "PyPI"
+ // omission_reason: "Only on Git. Not published to PyPI."
+ // }
+ optional string omission_reason = 2;
+
+ // The value of the package identifier as defined by the "type".
+ // Example:
+ // identifier {
+ // type: "PyPI"
+ // value: "django"
+ // version: "3.2.8"
+ // }
+ optional string value = 3;
+
+ // The version associated with this package as defined by the "type".
+ // Example:
+ // identifier {
+ // type: "PyPI"
+ // value: "django"
+ // version: "3.2.8"
+ // }
+ optional string version = 4;
+
+ // The closest version associated with this package as defined by the "type".
+ // This should only be set by automated infrastructure by applying automated
+ // heuristics, such as the closest git tag or package version from a package
+ // manifest file (e.g. pom.xml).
+ //
+ // For most identifier types, only one of `version` or `closest_version`
+ // should be set (not both). The exception is source repository types such as
+ // "Git", where `version` will refer to a git commit, and `closest_version`
+ // refers to a git tag.
+ // Example:
+ // identifier {
+ // type: "Git",
+ // value: "https://github.com/my/repo"
+ // version: "e5fa44f2b31c1fb553b6021e7360d07d5d91ff5e"
+ // closest_version: "v1.4"
+ // }
+ optional string closest_version = 5;
+
+ // When `true`, this Identifier represents the location from which the source
+ // code for this package was originally obtained. This should only be set for
+ // *one* Identifier in a third_party package's METADATA.
+
+ // For external packages, this is typically for the Identifier associated
+ // with the version control system or package manager that was used to
+ // check out or download the code.
+ optional bool primary_source = 6;
}
\ No newline at end of file
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index bfc87b8..1ddffc1 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -68,7 +68,7 @@
self.avbtool = avbtool if avbtool else "avbtool"
self.sign_tool = sign_tool
- def ProcessApexFile(self, apk_keys, payload_key, signing_args=None, is_sepolicy=False):
+ def ProcessApexFile(self, apk_keys, payload_key, signing_args=None):
"""Scans and signs the payload files and repack the apex
Args:
@@ -86,13 +86,9 @@
'list', self.apex_path]
entries_names = common.RunAndCheckOutput(list_cmd).split()
apk_entries = [name for name in entries_names if name.endswith('.apk')]
- sepolicy_entries = []
- if is_sepolicy:
- sepolicy_entries = [name for name in entries_names if
- name.startswith('./etc/SEPolicy') and name.endswith('.zip')]
# No need to sign and repack, return the original apex path.
- if not apk_entries and not sepolicy_entries and self.sign_tool is None:
+ if not apk_entries and self.sign_tool is None:
logger.info('No apk file to sign in %s', self.apex_path)
return self.apex_path
@@ -108,14 +104,14 @@
' %s', entry)
payload_dir, has_signed_content = self.ExtractApexPayloadAndSignContents(
- apk_entries, sepolicy_entries, apk_keys, payload_key, signing_args)
+ apk_entries, apk_keys, payload_key, signing_args)
if not has_signed_content:
- logger.info('No contents have been signed in %s', self.apex_path)
+ logger.info('No contents has been signed in %s', self.apex_path)
return self.apex_path
return self.RepackApexPayload(payload_dir, payload_key, signing_args)
- def ExtractApexPayloadAndSignContents(self, apk_entries, sepolicy_entries, apk_keys, payload_key, signing_args):
+ def ExtractApexPayloadAndSignContents(self, apk_entries, apk_keys, payload_key, signing_args):
"""Extracts the payload image and signs the containing apk files."""
if not os.path.exists(self.debugfs_path):
raise ApexSigningError(
@@ -133,11 +129,11 @@
'extract',
self.apex_path, payload_dir]
common.RunAndCheckOutput(extract_cmd)
- assert os.path.exists(self.apex_path)
has_signed_content = False
for entry in apk_entries:
apk_path = os.path.join(payload_dir, entry)
+ assert os.path.exists(self.apex_path)
key_name = apk_keys.get(os.path.basename(entry))
if key_name in common.SPECIAL_CERT_STRINGS:
@@ -154,37 +150,6 @@
codename_to_api_level_map=self.codename_to_api_level_map)
has_signed_content = True
- for entry in sepolicy_entries:
- sepolicy_path = os.path.join(payload_dir, entry)
-
- if not 'etc' in entry:
- logger.warning('Sepolicy path does not contain the intended directory name etc:'
- ' %s', entry)
-
- key_name = apk_keys.get(os.path.basename(entry))
- if key_name is None:
- logger.warning('Failed to find signing keys for {} in'
- ' apex {}, payload key will be used instead.'
- ' Use "-e <name>=" to specify a key'
- .format(entry, self.apex_path))
- key_name = payload_key
-
- if key_name in common.SPECIAL_CERT_STRINGS:
- logger.info('Not signing: %s due to special cert string', sepolicy_path)
- continue
-
- if OPTIONS.sign_sepolicy_path is not None:
- sig_path = os.path.join(payload_dir, sepolicy_path + '.sig')
- fsv_sig_path = os.path.join(payload_dir, sepolicy_path + '.fsv_sig')
- old_sig = common.MakeTempFile()
- old_fsv_sig = common.MakeTempFile()
- os.rename(sig_path, old_sig)
- os.rename(fsv_sig_path, old_fsv_sig)
-
- logger.info('Signing sepolicy file %s in apex %s', sepolicy_path, self.apex_path)
- if common.SignSePolicy(sepolicy_path, key_name, self.key_passwords.get(key_name)):
- has_signed_content = True
-
if self.sign_tool:
logger.info('Signing payload contents in apex %s with %s', self.apex_path, self.sign_tool)
# Pass avbtool to the custom signing tool
@@ -368,8 +333,7 @@
def SignUncompressedApex(avbtool, apex_file, payload_key, container_key,
container_pw, apk_keys, codename_to_api_level_map,
- no_hashtree, signing_args=None, sign_tool=None,
- is_sepolicy=False):
+ no_hashtree, signing_args=None, sign_tool=None):
"""Signs the current uncompressed APEX with the given payload/container keys.
Args:
@@ -382,7 +346,6 @@
no_hashtree: Don't include hashtree in the signed APEX.
signing_args: Additional args to be passed to the payload signer.
sign_tool: A tool to sign the contents of the APEX.
- is_sepolicy: Indicates if the apex is a sepolicy.apex
Returns:
The path to the signed APEX file.
@@ -392,8 +355,7 @@
apk_signer = ApexApkSigner(apex_file, container_pw,
codename_to_api_level_map,
avbtool, sign_tool)
- apex_file = apk_signer.ProcessApexFile(
- apk_keys, payload_key, signing_args, is_sepolicy)
+ apex_file = apk_signer.ProcessApexFile(apk_keys, payload_key, signing_args)
# 2a. Extract and sign the APEX_PAYLOAD_IMAGE entry with the given
# payload_key.
@@ -447,8 +409,7 @@
def SignCompressedApex(avbtool, apex_file, payload_key, container_key,
container_pw, apk_keys, codename_to_api_level_map,
- no_hashtree, signing_args=None, sign_tool=None,
- is_sepolicy=False):
+ no_hashtree, signing_args=None, sign_tool=None):
"""Signs the current compressed APEX with the given payload/container keys.
Args:
@@ -460,7 +421,6 @@
codename_to_api_level_map: A dict that maps from codename to API level.
no_hashtree: Don't include hashtree in the signed APEX.
signing_args: Additional args to be passed to the payload signer.
- is_sepolicy: Indicates if the apex is a sepolicy.apex
Returns:
The path to the signed APEX file.
@@ -487,8 +447,7 @@
codename_to_api_level_map,
no_hashtree,
signing_args,
- sign_tool,
- is_sepolicy)
+ sign_tool)
# 3. Compress signed original apex.
compressed_apex_file = common.MakeTempFile(prefix='apex-container-',
@@ -515,8 +474,8 @@
def SignApex(avbtool, apex_data, payload_key, container_key, container_pw,
- apk_keys, codename_to_api_level_map, no_hashtree,
- signing_args=None, sign_tool=None, is_sepolicy=False):
+ apk_keys, codename_to_api_level_map,
+ no_hashtree, signing_args=None, sign_tool=None):
"""Signs the current APEX with the given payload/container keys.
Args:
@@ -528,7 +487,6 @@
codename_to_api_level_map: A dict that maps from codename to API level.
no_hashtree: Don't include hashtree in the signed APEX.
signing_args: Additional args to be passed to the payload signer.
- is_sepolicy: Indicates if the apex is a sepolicy.apex
Returns:
The path to the signed APEX file.
@@ -554,8 +512,7 @@
no_hashtree=no_hashtree,
apk_keys=apk_keys,
signing_args=signing_args,
- sign_tool=sign_tool,
- is_sepolicy=is_sepolicy)
+ sign_tool=sign_tool)
elif apex_type == 'COMPRESSED':
return SignCompressedApex(
avbtool,
@@ -567,8 +524,7 @@
no_hashtree=no_hashtree,
apk_keys=apk_keys,
signing_args=signing_args,
- sign_tool=sign_tool,
- is_sepolicy=is_sepolicy)
+ sign_tool=sign_tool)
else:
# TODO(b/172912232): support signing compressed apex
raise ApexInfoError('Unsupported apex type {}'.format(apex_type))
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 5e4130c..34b7172 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -353,6 +353,8 @@
if compress_hints:
build_command.extend(["--compress-hints", compress_hints])
+ build_command.extend(["-b", prop_dict.get("erofs_blocksize", "4096")])
+
build_command.extend(["--mount-point", prop_dict["mount_point"]])
if target_out:
build_command.extend(["--product-out", target_out])
@@ -711,6 +713,7 @@
"erofs_default_compressor",
"erofs_default_compress_hints",
"erofs_pcluster_size",
+ "erofs_blocksize",
"erofs_share_dup_blocks",
"erofs_sparse_flag",
"erofs_use_legacy_compression",
@@ -762,6 +765,7 @@
(True, "{}_erofs_compressor", "erofs_compressor"),
(True, "{}_erofs_compress_hints", "erofs_compress_hints"),
(True, "{}_erofs_pcluster_size", "erofs_pcluster_size"),
+ (True, "{}_erofs_blocksize", "erofs_blocksize"),
(True, "{}_erofs_share_dup_blocks", "erofs_share_dup_blocks"),
(True, "{}_extfs_inode_count", "extfs_inode_count"),
(True, "{}_f2fs_compress", "f2fs_compress"),
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 0f3c430..8ee983f 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -75,9 +75,7 @@
if "ANDROID_HOST_OUT" in os.environ:
self.search_path = os.environ["ANDROID_HOST_OUT"]
self.signapk_shared_library_path = "lib64" # Relative to search_path
- self.sign_sepolicy_path = None
self.extra_signapk_args = []
- self.extra_sign_sepolicy_args = []
self.aapt2_path = "aapt2"
self.java_path = "java" # Use the one on the path by default.
self.java_args = ["-Xmx4096m"] # The default JVM args.
@@ -97,7 +95,6 @@
self.cache_size = None
self.stash_threshold = 0.8
self.logfile = None
- self.sepolicy_name = 'sepolicy.apex'
OPTIONS = Options()
@@ -2629,38 +2626,6 @@
proc.returncode, stdoutdata))
-def SignSePolicy(sepolicy, key, password):
- """Sign the sepolicy zip, producing an fsverity .fsv_sig and
- an RSA .sig signature files.
- """
-
- if OPTIONS.sign_sepolicy_path is None:
- logger.info("No sign_sepolicy_path specified, %s was not signed", sepolicy)
- return False
-
- java_library_path = os.path.join(
- OPTIONS.search_path, OPTIONS.signapk_shared_library_path)
-
- cmd = ([OPTIONS.java_path] + OPTIONS.java_args +
- ["-Djava.library.path=" + java_library_path,
- "-jar", os.path.join(OPTIONS.search_path, OPTIONS.sign_sepolicy_path)] +
- OPTIONS.extra_sign_sepolicy_args)
-
- cmd.extend([key + OPTIONS.public_key_suffix,
- key + OPTIONS.private_key_suffix,
- sepolicy, os.path.dirname(sepolicy)])
-
- proc = Run(cmd, stdin=subprocess.PIPE)
- if password is not None:
- password += "\n"
- stdoutdata, _ = proc.communicate(password)
- if proc.returncode != 0:
- raise ExternalError(
- "Failed to run sign sepolicy: return code {}:\n{}".format(
- proc.returncode, stdoutdata))
- return True
-
-
def CheckSize(data, target, info_dict):
"""Checks the data string passed against the max size limit.
@@ -2836,8 +2801,7 @@
opts, args = getopt.getopt(
argv, "hvp:s:x:" + extra_opts,
["help", "verbose", "path=", "signapk_path=",
- "signapk_shared_library_path=", "extra_signapk_args=",
- "sign_sepolicy_path=", "extra_sign_sepolicy_args=", "aapt2_path=",
+ "signapk_shared_library_path=", "extra_signapk_args=", "aapt2_path=",
"java_path=", "java_args=", "android_jar_path=", "public_key_suffix=",
"private_key_suffix=", "boot_signer_path=", "boot_signer_args=",
"verity_signer_path=", "verity_signer_args=", "device_specific=",
@@ -2861,10 +2825,6 @@
OPTIONS.signapk_shared_library_path = a
elif o in ("--extra_signapk_args",):
OPTIONS.extra_signapk_args = shlex.split(a)
- elif o in ("--sign_sepolicy_path",):
- OPTIONS.sign_sepolicy_path = a
- elif o in ("--extra_sign_sepolicy_args",):
- OPTIONS.extra_sign_sepolicy_args = shlex.split(a)
elif o in ("--aapt2_path",):
OPTIONS.aapt2_path = a
elif o in ("--java_path",):
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 7be9876..de0e187 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -995,7 +995,7 @@
metadata = GetPackageMetadata(target_info, source_info)
# Generate payload.
payload = PayloadGenerator(
- wipe_user_data=OPTIONS.wipe_user_data, minor_version=OPTIONS.force_minor_version, is_partial_update=OPTIONS.partial)
+ wipe_user_data=OPTIONS.wipe_user_data, minor_version=OPTIONS.force_minor_version, is_partial_update=OPTIONS.partial, spl_downgrade=OPTIONS.spl_downgrade)
partition_timestamps_flags = []
# Enforce a max timestamp this payload can be applied on top of.
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index 5c70223..6ca9d64 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -791,7 +791,7 @@
SECONDARY_PAYLOAD_BIN = 'secondary/payload.bin'
SECONDARY_PAYLOAD_PROPERTIES_TXT = 'secondary/payload_properties.txt'
- def __init__(self, secondary=False, wipe_user_data=False, minor_version=None, is_partial_update=False):
+ def __init__(self, secondary=False, wipe_user_data=False, minor_version=None, is_partial_update=False, spl_downgrade=False):
"""Initializes a Payload instance.
Args:
@@ -803,6 +803,7 @@
self.wipe_user_data = wipe_user_data
self.minor_version = minor_version
self.is_partial_update = is_partial_update
+ self.spl_downgrade = spl_downgrade
def _Run(self, cmd): # pylint: disable=no-self-use
# Don't pipe (buffer) the output if verbose is set. Let
@@ -912,13 +913,15 @@
"--properties_file=" + properties_file]
self._Run(cmd)
- if self.secondary:
- with open(properties_file, "a") as f:
- f.write("SWITCH_SLOT_ON_REBOOT=0\n")
- if self.wipe_user_data:
- with open(properties_file, "a") as f:
+ with open(properties_file, "a") as f:
+ if self.wipe_user_data:
f.write("POWERWASH=1\n")
+ if self.secondary:
+ f.write("SWITCH_SLOT_ON_REBOOT=0\n")
+ if self.spl_downgrade:
+ f.write("SPL_DOWNGRADE=1\n")
+
self.payload_properties = properties_file
diff --git a/tools/releasetools/sign_apex.py b/tools/releasetools/sign_apex.py
index d739982..a0a94f6 100755
--- a/tools/releasetools/sign_apex.py
+++ b/tools/releasetools/sign_apex.py
@@ -56,7 +56,6 @@
import common
logger = logging.getLogger(__name__)
-OPTIONS = common.OPTIONS
def SignApexFile(avbtool, apex_file, payload_key, container_key, no_hashtree,
@@ -75,8 +74,7 @@
no_hashtree=no_hashtree,
apk_keys=apk_keys,
signing_args=signing_args,
- sign_tool=sign_tool,
- is_sepolicy=apex_file.endswith(OPTIONS.sepolicy_name))
+ sign_tool=sign_tool)
def main(argv):
diff --git a/tools/releasetools/test_sign_apex.py b/tools/releasetools/test_sign_apex.py
index 7723de7..8470f20 100644
--- a/tools/releasetools/test_sign_apex.py
+++ b/tools/releasetools/test_sign_apex.py
@@ -59,21 +59,6 @@
self.assertTrue(os.path.exists(signed_test_apex))
@test_utils.SkipIfExternalToolsUnavailable()
- def test_SignSepolicyApex(self):
- test_apex = os.path.join(self.testdata_dir, 'sepolicy.apex')
- payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
- container_key = os.path.join(self.testdata_dir, 'testkey')
- apk_keys = {'SEPolicy-33.zip': os.path.join(self.testdata_dir, 'testkey')}
- signed_test_apex = sign_apex.SignApexFile(
- 'avbtool',
- test_apex,
- payload_key,
- container_key,
- False,
- None)
- self.assertTrue(os.path.exists(signed_test_apex))
-
- @test_utils.SkipIfExternalToolsUnavailable()
def test_SignCompressedApexFile(self):
apex = os.path.join(test_utils.get_current_dir(), 'com.android.apex.compressed.v1.capex')
payload_key = os.path.join(self.testdata_dir, 'testkey_RSA4096.key')
diff --git a/tools/releasetools/testdata/sepolicy.apex b/tools/releasetools/testdata/sepolicy.apex
deleted file mode 100644
index 2c646cd..0000000
--- a/tools/releasetools/testdata/sepolicy.apex
+++ /dev/null
Binary files differ
diff --git a/tools/sbom/generate-sbom.py b/tools/sbom/generate-sbom.py
index 4d090eb..a2b33b5 100755
--- a/tools/sbom/generate-sbom.py
+++ b/tools/sbom/generate-sbom.py
@@ -82,6 +82,46 @@
'vndk_prebuilt_shared',
]
+THIRD_PARTY_IDENTIFIER_TYPES = [
+ # Types defined in metadata_file.proto
+ 'Git',
+ 'SVN',
+ 'Hg',
+ 'Darcs',
+ 'VCS',
+ 'Archive',
+ 'PrebuiltByAlphabet',
+ 'LocalSource',
+ 'Other',
+ # OSV ecosystems defined at https://ossf.github.io/osv-schema/#affectedpackage-field.
+ 'Go',
+ 'npm',
+ 'OSS-Fuzz',
+ 'PyPI',
+ 'RubyGems',
+ 'crates.io',
+ 'Hackage',
+ 'GHC',
+ 'Packagist',
+ 'Maven',
+ 'NuGet',
+ 'Linux',
+ 'Debian',
+ 'Alpine',
+ 'Hex',
+ 'Android',
+ 'GitHub Actions',
+ 'Pub',
+ 'ConanCenter',
+ 'Rocky Linux',
+ 'AlmaLinux',
+ 'Bitnami',
+ 'Photon OS',
+ 'CRAN',
+ 'Bioconductor',
+ 'SwiftURL'
+]
+
def get_args():
parser = argparse.ArgumentParser()
@@ -346,6 +386,20 @@
return True
+# Validate identifiers in a package's METADATA:
+# 1) Only known identifier types are allowed.
+# 2) At most one identifier may have primary_source set to true.
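+# For reference, a well-formed entry in a package's METADATA looks roughly like
+# the following (values are illustrative):
+#   third_party {
+#     identifier {
+#       type: "Git"
+#       value: "https://github.com/example/project"
+#       version: "v1.2.3"
+#       primary_source: true
+#     }
+#   }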
+def validate_package_metadata(metadata_file_path, package_metadata):
+ primary_source_found = False
+ for identifier in package_metadata.third_party.identifier:
+ if identifier.type not in THIRD_PARTY_IDENTIFIER_TYPES:
+ sys.exit(f'Unknown value of third_party.identifier.type in {metadata_file_path}/METADATA: {identifier.type}.')
+ if primary_source_found and identifier.primary_source:
+ sys.exit(
+ f'Field "primary_source" is set to true in multiple third_party.identifier in {metadata_file_path}/METADATA.')
+    primary_source_found = primary_source_found or identifier.primary_source
+
+
def report_metadata_file(metadata_file_path, installed_file_metadata, report):
if metadata_file_path:
report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
@@ -358,6 +412,8 @@
with open(metadata_file_path + '/METADATA', 'rt') as f:
text_format.Parse(f.read(), package_metadata)
+ validate_package_metadata(metadata_file_path, package_metadata)
+
if not metadata_file_path in metadata_file_protos:
metadata_file_protos[metadata_file_path] = package_metadata
if not package_metadata.name:
diff --git a/tools/signapk/src/com/android/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
index 25c53d3..2f2b833 100644
--- a/tools/signapk/src/com/android/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -687,7 +687,7 @@
if (entryName.endsWith(".so")) {
// Align .so contents to memory page boundary to enable memory-mapped
// execution.
- return 4096;
+ return 16384;
} else {
return defaultAlignment;
}