Merge "Introduce dist-for-goals-with-filenametag"
diff --git a/core/Makefile b/core/Makefile
index 85f33bc..f7363a1 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -17,6 +17,52 @@
SYSTEM_DLKM_NOTICE_DEPS :=
# -----------------------------------------------------------------
+# Release Config Flags
+
+# Create a summary file of build flags for each partition
+# $(1): build flags json file
+# $(2): flag names
+define generate-partition-build-flag-file
+$(eval $(strip $(1)): PRIVATE_OUT := $(strip $(1)))
+$(eval $(strip $(1)): PRIVATE_FLAG_NAMES := $(strip $(2)))
+$(strip $(1)):
+ mkdir -p $$(dir $$(PRIVATE_OUT))
+ ( \
+ echo '{' ; \
+ echo 'flags: [' ; \
+ $$(foreach flag, $$(PRIVATE_FLAG_NAMES), \
+ printf ' { "name": "%s", "value": "%s", ' \
+ '$$(flag)' \
+ '$$(_ALL_RELEASE_FLAGS.$$(flag).VALUE)' \
+ ; \
+ printf '"set": "%s", "default": "%s", "declared": "%s", }' \
+ '$$(_ALL_RELEASE_FLAGS.$$(flag).SET_IN)' \
+ '$$(_ALL_RELEASE_FLAGS.$$(flag).DEFAULT)' \
+ '$$(_ALL_RELEASE_FLAGS.$$(flag).DECLARED_IN)' \
+ ; \
+ printf '$$(if $$(filter $$(lastword $$(PRIVATE_FLAG_NAMES)),$$(flag)),,$$(comma))\n' ; \
+ ) \
+ echo "]" ; \
+ echo "}" \
+ ) >> $$(PRIVATE_OUT)
+endef
+
+$(foreach partition, $(_FLAG_PARTITIONS), \
+ $(eval BUILD_FLAG_SUMMARIES.$(partition) \
+ := $(TARGET_OUT_FLAGS)/$(partition)/etc/build_flags.json) \
+ $(eval $(call generate-partition-build-flag-file, \
+ $(BUILD_FLAG_SUMMARIES.$(partition)), \
+ $(_ALL_RELEASE_FLAGS.PARTITIONS.$(partition)) \
+ ) \
+ ) \
+)
+
+# TODO: Remove
+.PHONY: flag-files
+flag-files: $(foreach partition, $(_FLAG_PARTITIONS), \
+ $(TARGET_OUT_FLAGS)/$(partition)/etc/build_flags.json)
+
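
For a single hypothetical flag RELEASE_MY_FEATURE (name and paths illustrative), the recipe above emits roughly the following. Note the output is only JSON-like as written: the top-level flags key is unquoted and each entry carries a trailing comma, exactly as the echo/printf format strings specify.

    {
    flags: [
     { "name": "RELEASE_MY_FEATURE", "value": "true", "set": "vendor/acme/release/trunk.scl", "default": "false", "declared": "build/release/build_flags.scl", }
    ]
    }
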
+# -----------------------------------------------------------------
# Define rules to copy PRODUCT_COPY_FILES defined by the product.
# PRODUCT_COPY_FILES contains words like <source file>:<dest file>[:<owner>].
# <dest file> is relative to $(PRODUCT_OUT), so it should look like,
@@ -530,6 +576,24 @@
$(call copy-many-files,$(call module-load-list-copy-paths,$(call intermediates-dir-for,PACKAGING,vendor_charger_module_list$(_sep)$(_kver)),$(BOARD_VENDOR_CHARGER_KERNEL_MODULES$(_sep)$(_kver)),$(BOARD_VENDOR_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),modules.load.charger,$(TARGET_OUT_VENDOR))))
endef
+# $(1): kernel module directory name (top is an out-of-band value meaning no directory)
+define build-vendor-ramdisk-charger-load
+$(if $(filter top,$(1)),\
+ $(eval _kver :=)$(eval _sep :=),\
+ $(eval _kver := $(1))$(eval _sep :=_))\
+ $(if $(BOARD_VENDOR_RAMDISK_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),\
+ $(call copy-many-files,$(call module-load-list-copy-paths,$(call intermediates-dir-for,PACKAGING,vendor_ramdisk_charger_module_list$(_sep)$(_kver)),$(BOARD_VENDOR_RAMDISK_KERNEL_MODULES$(_sep)$(_kver)),$(BOARD_VENDOR_RAMDISK_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),modules.load.charger,$(TARGET_VENDOR_RAMDISK_OUT))))
+endef
+
+# $(1): kernel module directory name (top is an out-of-band value meaning no directory)
+define build-vendor-kernel-ramdisk-charger-load
+$(if $(filter top,$(1)),\
+ $(eval _kver :=)$(eval _sep :=),\
+ $(eval _kver := $(1))$(eval _sep :=_))\
+ $(if $(BOARD_VENDOR_KERNEL_RAMDISK_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),\
+ $(call copy-many-files,$(call module-load-list-copy-paths,$(call intermediates-dir-for,PACKAGING,vendor_kernel_ramdisk_charger_module_list$(_sep)$(_kver)),$(BOARD_VENDOR_KERNEL_RAMDISK_KERNEL_MODULES$(_sep)$(_kver)),$(BOARD_VENDOR_KERNEL_RAMDISK_CHARGER_KERNEL_MODULES_LOAD$(_sep)$(_kver)),modules.load.charger,$(TARGET_VENDOR_KERNEL_RAMDISK_OUT))))
+endef
+
ifneq ($(BUILDING_VENDOR_BOOT_IMAGE),true)
# If there is no vendor boot partition, store vendor ramdisk kernel modules in the
# boot ramdisk.
@@ -595,6 +659,8 @@
$(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-kernel-ramdisk-recovery-load,$(kmd))) \
$(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,VENDOR,$(if $(filter true,$(BOARD_USES_VENDOR_DLKMIMAGE)),$(TARGET_OUT_VENDOR_DLKM),$(TARGET_OUT_VENDOR)),vendor,modules.load,$(VENDOR_STRIPPED_MODULE_STAGING_DIR),$(kmd),$(BOARD_SYSTEM_KERNEL_MODULES),system)) \
$(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-charger-load,$(kmd))) \
+ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-ramdisk-charger-load,$(kmd))) \
+ $(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-vendor-kernel-ramdisk-charger-load,$(kmd))) \
$(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,ODM,$(if $(filter true,$(BOARD_USES_ODM_DLKMIMAGE)),$(TARGET_OUT_ODM_DLKM),$(TARGET_OUT_ODM)),odm,modules.load,,$(kmd))) \
$(eval ALL_DEFAULT_INSTALLED_MODULES += $(call build-image-kernel-modules-dir,SYSTEM,$(if $(filter true,$(BOARD_USES_SYSTEM_DLKMIMAGE)),$(TARGET_OUT_SYSTEM_DLKM),$(TARGET_OUT_SYSTEM)),system,modules.load,,$(kmd))) \
$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
@@ -935,6 +1001,42 @@
BUILT_RAMDISK_TARGET := $(PRODUCT_OUT)/ramdisk.img
+
+ifneq ($(BOARD_KERNEL_MODULES_16K),)
+
+TARGET_OUT_RAMDISK_16K := $(PRODUCT_OUT)/ramdisk_16k
+BUILT_RAMDISK_16K_TARGET := $(PRODUCT_OUT)/ramdisk_16k.img
+RAMDISK_16K_STAGING_DIR := $(call intermediates-dir-for,PACKAGING,depmod_ramdisk_16k)
+
+$(BUILT_RAMDISK_16K_TARGET): $(DEPMOD) $(MKBOOTFS)
+$(BUILT_RAMDISK_16K_TARGET): $(call copy-many-files,$(foreach file,$(BOARD_KERNEL_MODULES_16K),$(file):$(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0/$(notdir $(file))))
+ $(DEPMOD) -b $(RAMDISK_16K_STAGING_DIR) 0.0
+ for MODULE in $(BOARD_KERNEL_MODULES_16K); do \
+ basename $$MODULE >> $(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0/modules.load ; \
+ done;
+ mkdir -p $(TARGET_OUT_RAMDISK_16K)/lib
+ rm -rf $(TARGET_OUT_RAMDISK_16K)/lib/modules
+ cp -r $(RAMDISK_16K_STAGING_DIR)/lib/modules/0.0 $(TARGET_OUT_RAMDISK_16K)/lib/modules
+ $(MKBOOTFS) $(TARGET_OUT_RAMDISK_16K) > $@
+
+# Builds a ramdisk using modules defined in BOARD_KERNEL_MODULES_16K
+ramdisk_16k: $(BUILT_RAMDISK_16K_TARGET)
+.PHONY: ramdisk_16k
+
+endif
+
+ifneq ($(BOARD_KERNEL_PATH_16K),)
+BUILT_KERNEL_16K_TARGET := $(PRODUCT_OUT)/kernel_16k
+
+$(eval $(call copy-one-file,$(BOARD_KERNEL_PATH_16K),$(BUILT_KERNEL_16K_TARGET)))
+
+# Copies BOARD_KERNEL_PATH_16K to the output directory as-is
+kernel_16k: $(BUILT_KERNEL_16K_TARGET)
+.PHONY: kernel_16k
+
+endif
+
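
Both goals are opt-in via board configuration; a minimal sketch, with paths illustrative:

    # BoardConfig.mk
    BOARD_KERNEL_MODULES_16K := $(wildcard device/acme/kernel-16k/*.ko)
    BOARD_KERNEL_PATH_16K := device/acme/kernel-16k/Image

With these set, running m ramdisk_16k kernel_16k produces $(PRODUCT_OUT)/ramdisk_16k.img and $(PRODUCT_OUT)/kernel_16k.
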
+
ifeq ($(BOARD_RAMDISK_USE_LZ4),true)
# -l enables the legacy format used by the Linux kernel
COMPRESSION_COMMAND_DEPS := $(LZ4)
@@ -3288,8 +3390,8 @@
endif # BUILDING_SYSTEM_IMAGE
-.PHONY: sync syncsys
-sync syncsys: $(INTERNAL_SYSTEMIMAGE_FILES)
+.PHONY: sync syncsys sync_system
+sync syncsys sync_system: $(INTERNAL_SYSTEMIMAGE_FILES)
# -----------------------------------------------------------------
# Old PDK fusion targets
@@ -3617,7 +3719,8 @@
vendorimage-nodeps vnod: | $(INTERNAL_USERIMAGES_DEPS)
$(build-vendorimage-target)
-sync: $(INTERNAL_VENDORIMAGE_FILES)
+.PHONY: sync_vendor
+sync sync_vendor: $(INTERNAL_VENDORIMAGE_FILES)
else ifdef BOARD_PREBUILT_VENDORIMAGE
INSTALLED_VENDORIMAGE_TARGET := $(PRODUCT_OUT)/vendor.img
@@ -3681,7 +3784,8 @@
productimage-nodeps pnod: | $(INTERNAL_USERIMAGES_DEPS)
$(build-productimage-target)
-sync: $(INTERNAL_PRODUCTIMAGE_FILES)
+.PHONY: sync_product
+sync sync_product: $(INTERNAL_PRODUCTIMAGE_FILES)
else ifdef BOARD_PREBUILT_PRODUCTIMAGE
INSTALLED_PRODUCTIMAGE_TARGET := $(PRODUCT_OUT)/product.img
@@ -3743,7 +3847,8 @@
systemextimage-nodeps senod: | $(INTERNAL_USERIMAGES_DEPS)
$(build-system_extimage-target)
-sync: $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+.PHONY: sync_system_ext
+sync sync_system_ext: $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
else ifdef BOARD_PREBUILT_SYSTEM_EXTIMAGE
INSTALLED_SYSTEM_EXTIMAGE_TARGET := $(PRODUCT_OUT)/system_ext.img
@@ -3824,7 +3929,8 @@
odmimage-nodeps onod: | $(INTERNAL_USERIMAGES_DEPS)
$(build-odmimage-target)
-sync: $(INTERNAL_ODMIMAGE_FILES)
+.PHONY: sync_odm
+sync sync_odm: $(INTERNAL_ODMIMAGE_FILES)
else ifdef BOARD_PREBUILT_ODMIMAGE
INSTALLED_ODMIMAGE_TARGET := $(PRODUCT_OUT)/odm.img
@@ -3885,7 +3991,8 @@
vendor_dlkmimage-nodeps vdnod: | $(INTERNAL_USERIMAGES_DEPS)
$(build-vendor_dlkmimage-target)
-sync: $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+.PHONY: sync_vendor_dlkm
+sync sync_vendor_dlkm: $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
else ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
INSTALLED_VENDOR_DLKMIMAGE_TARGET := $(PRODUCT_OUT)/vendor_dlkm.img
@@ -3946,7 +4053,8 @@
odm_dlkmimage-nodeps odnod: | $(INTERNAL_USERIMAGES_DEPS)
$(build-odm_dlkmimage-target)
-sync: $(INTERNAL_ODM_DLKMIMAGE_FILES)
+.PHONY: sync_odm_dlkm
+sync sync_odm_dlkm: $(INTERNAL_ODM_DLKMIMAGE_FILES)
else ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
INSTALLED_ODM_DLKMIMAGE_TARGET := $(PRODUCT_OUT)/odm_dlkm.img
@@ -4009,7 +4117,8 @@
system_dlkmimage-nodeps sdnod: | $(INTERNAL_USERIMAGES_DEPS)
$(build-system_dlkmimage-target)
-sync: $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
+.PHONY: sync_system_dlkm
+sync sync_system_dlkm: $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
else ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
INSTALLED_SYSTEM_DLKMIMAGE_TARGET := $(PRODUCT_OUT)/system_dlkm.img
@@ -5122,6 +5231,7 @@
check_target_files_signatures \
check_target_files_vintf \
checkvintf \
+ create_brick_ota \
delta_generator \
e2fsck \
e2fsdroid \
@@ -5143,6 +5253,7 @@
lz4 \
make_f2fs \
make_f2fs_casefold \
+ merge_ota \
merge_target_files \
minigzip \
mk_combined_img \
@@ -5271,6 +5382,62 @@
endif # build_otatools_package
# -----------------------------------------------------------------
+# fastboot-info.txt
+FASTBOOT_INFO_VERSION = 1
+
+INSTALLED_FASTBOOT_INFO_TARGET := $(PRODUCT_OUT)/fastboot-info.txt
+
+$(INSTALLED_FASTBOOT_INFO_TARGET):
+ rm -f $@
+ $(call pretty,"Target fastboot-info.txt: $@")
+ $(hide) echo "# fastboot-info for $(TARGET_PRODUCT)" >> $@
+ $(hide) echo "version $(FASTBOOT_INFO_VERSION)" >> $@
+ifneq ($(INSTALLED_BOOTIMAGE_TARGET),)
+ $(hide) echo "flash boot" >> $@
+endif
+ifneq ($(INSTALLED_INIT_BOOT_IMAGE_TARGET),)
+ $(hide) echo "flash init_boot" >> $@
+endif
+ifdef BOARD_PREBUILT_DTBOIMAGE
+ $(hide) echo "flash dtbo" >> $@
+endif
+ifeq ($(BOARD_USES_PVMFWIMAGE),true)
+ $(hide) echo "flash pvmfw" >> $@
+endif
+ifeq ($(BOARD_AVB_ENABLE),true)
+ifeq ($(BUILDING_VBMETA_IMAGE),true)
+ $(hide) echo "flash --apply-vbmeta vbmeta" >> $@
+endif
+ifneq (,$(strip $(BOARD_AVB_VBMETA_SYSTEM)))
+ $(hide) echo "flash --apply-vbmeta vbmeta_system" >> $@
+endif
+ifneq (,$(strip $(BOARD_AVB_VBMETA_VENDOR)))
+ $(hide) echo "flash --apply-vbmeta vbmeta_vendor" >> $@
+endif
+ifneq ($(INSTALLED_VENDOR_BOOTIMAGE_TARGET),)
+ $(hide) echo "flash vendor_boot" >> $@
+endif
+ifneq (,$(strip $(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS)))
+ $(hide) $(foreach partition,$(BOARD_AVB_VBMETA_CUSTOM_PARTITIONS), \
+ echo "flash --apply-vbmeta vbmeta_$(partition)" >> $@;)
+endif
+endif # BOARD_AVB_ENABLE
+ $(hide) echo "reboot fastboot" >> $@
+ $(hide) echo "update-super" >> $@
+ $(hide) $(foreach partition,$(BOARD_SUPER_PARTITION_PARTITION_LIST), \
+ echo "flash $(partition)" >> $@;)
+ifdef BUILDING_SYSTEM_OTHER_IMAGE
+ $(hide) echo "flash --slot-other system system_other.img" >> $@
+endif
+ifdef BUILDING_CACHE_IMAGE
+ $(hide) echo "if-wipe erase cache" >> $@
+endif
+ $(hide) echo "if-wipe erase userdata" >> $@
+ifeq ($(BOARD_USES_METADATA_PARTITION),true)
+ $(hide) echo "if-wipe erase metadata" >> $@
+endif
+
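
On a typical AVB-enabled device with boot, init_boot, dtbo and vendor_boot partitions, the generated file comes out roughly as follows (product name and super partition list illustrative):

    # fastboot-info for aosp_cf_x86_64_phone
    version 1
    flash boot
    flash init_boot
    flash dtbo
    flash --apply-vbmeta vbmeta
    flash --apply-vbmeta vbmeta_system
    flash vendor_boot
    reboot fastboot
    update-super
    flash system
    flash system_ext
    flash product
    flash vendor
    if-wipe erase userdata
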
+# -----------------------------------------------------------------
# misc_info.txt
INSTALLED_MISC_INFO_TARGET := $(PRODUCT_OUT)/misc_info.txt
@@ -5520,6 +5687,13 @@
$(hide) echo "target_flatten_apex=false" >> $@
endif
+$(call declare-0p-target,$(INSTALLED_FASTBOOT_INFO_TARGET))
+
+.PHONY: fastboot_info
+fastboot_info: $(INSTALLED_FASTBOOT_INFO_TARGET)
+
+droidcore-unbundled: $(INSTALLED_FASTBOOT_INFO_TARGET)
+
$(call declare-0p-target,$(INSTALLED_MISC_INFO_TARGET))
.PHONY: misc_info
@@ -5539,10 +5713,12 @@
name := $(name)-target_files-$(FILE_NAME_TAG)
intermediates := $(call intermediates-dir-for,PACKAGING,target_files)
+BUILT_TARGET_FILES_DIR := $(intermediates)/$(name).zip.list
BUILT_TARGET_FILES_PACKAGE := $(intermediates)/$(name).zip
-$(BUILT_TARGET_FILES_PACKAGE): intermediates := $(intermediates)
-$(BUILT_TARGET_FILES_PACKAGE): \
- zip_root := $(intermediates)/$(name)
+$(BUILT_TARGET_FILES_PACKAGE): zip_root := $(intermediates)/$(name)
+$(BUILT_TARGET_FILES_DIR): zip_root := $(intermediates)/$(name)
+$(BUILT_TARGET_FILES_DIR): intermediates := $(intermediates)
+
# $(1): Directory to copy
# $(2): Location to copy it to
@@ -5562,10 +5738,10 @@
$(call intermediates-dir-for,EXECUTABLES,updater)/updater
endif
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_OTA_TOOLS := $(built_ota_tools)
+$(BUILT_TARGET_FILES_DIR): PRIVATE_OTA_TOOLS := $(built_ota_tools)
tool_extension := $(wildcard $(tool_extensions)/releasetools.py)
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_TOOL_EXTENSION := $(tool_extension)
+$(BUILT_TARGET_FILES_DIR): PRIVATE_TOOL_EXTENSION := $(tool_extension)
updater_dep :=
ifeq ($(AB_OTA_UPDATER),true)
@@ -5581,23 +5757,23 @@
updater_dep += $(built_ota_tools)
endif
-$(BUILT_TARGET_FILES_PACKAGE): $(updater_dep)
+$(BUILT_TARGET_FILES_DIR): $(updater_dep)
# If we are using recovery as boot, output recovery files to BOOT/.
# If we are moving recovery resources to vendor_boot, output recovery files to VENDOR_BOOT/.
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := BOOT
+$(BUILT_TARGET_FILES_DIR): PRIVATE_RECOVERY_OUT := BOOT
else ifeq ($(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT),true)
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := VENDOR_BOOT
+$(BUILT_TARGET_FILES_DIR): PRIVATE_RECOVERY_OUT := VENDOR_BOOT
else
-$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := RECOVERY
+$(BUILT_TARGET_FILES_DIR): PRIVATE_RECOVERY_OUT := RECOVERY
endif
ifeq ($(AB_OTA_UPDATER),true)
ifdef OSRELEASED_DIRECTORY
- $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_id
- $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version
- $(BUILT_TARGET_FILES_PACKAGE): $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version
+ $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_id
+ $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_OEM)/$(OSRELEASED_DIRECTORY)/product_version
+ $(BUILT_TARGET_FILES_DIR): $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/system_version
endif
# Not checking in board_config.mk, since AB_OTA_PARTITIONS may be updated in Android.mk (e.g. to
@@ -5699,34 +5875,36 @@
echo "virtual_ab_compression_method=$(PRODUCT_VIRTUAL_AB_COMPRESSION_METHOD)" >> $(1))
$(if $(filter true,$(PRODUCT_VIRTUAL_AB_OTA_RETROFIT)), \
echo "virtual_ab_retrofit=true" >> $(1))
+ $(if $(PRODUCT_VIRTUAL_AB_COW_VERSION), \
+ echo "virtual_ab_cow_version=$(PRODUCT_VIRTUAL_AB_COW_VERSION)" >> $(1))
endef
# By conditionally including the dependency of the target files package on the
# full system image deps, we speed up builds that do not build the system
# image.
ifdef BUILDING_SYSTEM_IMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(FULL_SYSTEMIMAGE_DEPS)
+ $(BUILT_TARGET_FILES_DIR): $(FULL_SYSTEMIMAGE_DEPS)
else
# releasetools may need the system build.prop even when building a
# system-image-less product.
- $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BUILD_PROP_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INSTALLED_BUILD_PROP_TARGET)
endif
ifdef BUILDING_USERDATA_IMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_USERDATAIMAGE_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_USERDATAIMAGE_FILES)
endif
ifdef BUILDING_SYSTEM_OTHER_IMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEMOTHERIMAGE_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEMOTHERIMAGE_FILES)
endif
ifdef BUILDING_VENDOR_BOOT_IMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FILES)
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_RAMDISK_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_RAMDISK_FRAGMENT_TARGETS)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_BOOTCONFIG_TARGET)
# The vendor ramdisk may be built from the recovery ramdisk.
ifeq (true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT))
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_RECOVERY_RAMDISK_FILES_TIMESTAMP)
endif
endif
@@ -5736,11 +5914,11 @@
# commands in build-recoveryimage-target, which would touch the files under
# TARGET_RECOVERY_OUT and race with packaging target-files.zip.
ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
- $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTIMAGE_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INSTALLED_BOOTIMAGE_TARGET)
else
- $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_RECOVERYIMAGE_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INSTALLED_RECOVERYIMAGE_TARGET)
endif
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RECOVERYIMAGE_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_RECOVERYIMAGE_FILES)
endif
# Conditionally depend on the image files if the image is being built so the
@@ -5748,68 +5926,68 @@
# if it is coming from a prebuilt.
ifdef BUILDING_VENDOR_IMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDORIMAGE_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDORIMAGE_FILES)
else ifdef BOARD_PREBUILT_VENDORIMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDORIMAGE_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INSTALLED_VENDORIMAGE_TARGET)
endif
ifdef BUILDING_PRODUCT_IMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_PRODUCTIMAGE_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_PRODUCTIMAGE_FILES)
else ifdef BOARD_PREBUILT_PRODUCTIMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_PRODUCTIMAGE_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INSTALLED_PRODUCTIMAGE_TARGET)
endif
ifdef BUILDING_SYSTEM_EXT_IMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEM_EXTIMAGE_FILES)
else ifdef BOARD_PREBUILT_SYSTEM_EXTIMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INSTALLED_SYSTEM_EXTIMAGE_TARGET)
endif
ifneq (,$(BUILDING_BOOT_IMAGE)$(BUILDING_INIT_BOOT_IMAGE))
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_RAMDISK_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_RAMDISK_FILES)
endif # BUILDING_BOOT_IMAGE != "" || BUILDING_INIT_BOOT_IMAGE != ""
ifneq (,$(INTERNAL_PREBUILT_BOOTIMAGE) $(filter true,$(BOARD_COPY_BOOT_IMAGE_TO_TARGET_FILES)))
- $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTIMAGE_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INSTALLED_BOOTIMAGE_TARGET)
endif
ifdef BUILDING_ODM_IMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODMIMAGE_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_ODMIMAGE_FILES)
else ifdef BOARD_PREBUILT_ODMIMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODMIMAGE_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INSTALLED_ODMIMAGE_TARGET)
endif
ifdef BUILDING_VENDOR_DLKM_IMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_VENDOR_DLKMIMAGE_FILES)
else ifdef BOARD_PREBUILT_VENDOR_DLKMIMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INSTALLED_VENDOR_DLKMIMAGE_TARGET)
endif
ifdef BUILDING_ODM_DLKM_IMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_ODM_DLKMIMAGE_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_ODM_DLKMIMAGE_FILES)
else ifdef BOARD_PREBUILT_ODM_DLKMIMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INSTALLED_ODM_DLKMIMAGE_TARGET)
endif
ifdef BUILDING_SYSTEM_DLKM_IMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
+ $(BUILT_TARGET_FILES_DIR): $(INTERNAL_SYSTEM_DLKMIMAGE_FILES)
else ifdef BOARD_PREBUILT_SYSTEM_DLKMIMAGE
- $(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
+ $(BUILT_TARGET_FILES_DIR): $(INSTALLED_SYSTEM_DLKMIMAGE_TARGET)
endif
ifeq ($(BUILD_QEMU_IMAGES),true)
MK_VBMETA_BOOT_KERNEL_CMDLINE_SH := device/generic/goldfish/tools/mk_vbmeta_boot_params.sh
- $(BUILT_TARGET_FILES_PACKAGE): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
+ $(BUILT_TARGET_FILES_DIR): $(MK_VBMETA_BOOT_KERNEL_CMDLINE_SH)
endif
ifdef BOARD_PREBUILT_BOOTLOADER
-$(BUILT_TARGET_FILES_PACKAGE): $(INSTALLED_BOOTLOADER_MODULE)
+$(BUILT_TARGET_FILES_DIR): $(INSTALLED_BOOTLOADER_MODULE)
droidcore-unbundled: $(INSTALLED_BOOTLOADER_MODULE)
endif
# Depending on the various images guarantees that the underlying
# directories are up-to-date.
-$(BUILT_TARGET_FILES_PACKAGE): \
+$(BUILT_TARGET_FILES_DIR): \
$(INSTALLED_RADIOIMAGE_TARGET) \
$(INSTALLED_RECOVERYIMAGE_TARGET) \
$(INSTALLED_CACHEIMAGE_TARGET) \
@@ -5837,6 +6015,7 @@
$(LPMAKE) \
$(SELINUX_FC) \
$(INSTALLED_MISC_INFO_TARGET) \
+ $(INSTALLED_FASTBOOT_INFO_TARGET) \
$(APKCERTS_FILE) \
$(SOONG_APEX_KEYS_FILE) \
$(SOONG_ZIP) \
@@ -5846,7 +6025,7 @@
$(BUILT_KERNEL_CONFIGS_FILE) \
$(BUILT_KERNEL_VERSION_FILE) \
| $(ACP)
- @echo "Package target files: $@"
+ @echo "Building target files: $@"
$(hide) rm -rf $@ $@.list $(zip_root)
$(hide) mkdir -p $(dir $@) $(zip_root)
ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT))$(filter true,$(BOARD_MOVE_RECOVERY_RESOURCES_TO_VENDOR_BOOT)))
@@ -6055,6 +6234,9 @@
$(hide) echo "$(PRODUCT_OTA_PUBLIC_KEYS)" > $(zip_root)/META/otakeys.txt
$(hide) cp $(SELINUX_FC) $(zip_root)/META/file_contexts.bin
$(hide) cp $(INSTALLED_MISC_INFO_TARGET) $(zip_root)/META/misc_info.txt
+ifneq ($(INSTALLED_FASTBOOT_INFO_TARGET),)
+ $(hide) cp $(INSTALLED_FASTBOOT_INFO_TARGET) $(zip_root)/META/fastboot-info.txt
+endif
ifneq ($(PRODUCT_SYSTEM_BASE_FS_PATH),)
$(hide) cp $(PRODUCT_SYSTEM_BASE_FS_PATH) \
$(zip_root)/META/$(notdir $(PRODUCT_SYSTEM_BASE_FS_PATH))
@@ -6249,13 +6431,19 @@
endif
@# Zip everything up, preserving symlinks and placing META/ files first to
@# help early validation of the .zip file while uploading it.
- $(hide) find $(zip_root)/META | sort >$@.list
- $(hide) find $(zip_root) -path $(zip_root)/META -prune -o -print | sort >>$@.list
+ $(hide) find $(zip_root)/META | sort >$@
+ $(hide) find $(zip_root) -path $(zip_root)/META -prune -o -print | sort >>$@
+
+$(BUILT_TARGET_FILES_PACKAGE): $(BUILT_TARGET_FILES_DIR)
+ @echo "Packaging target files: $@"
$(hide) $(SOONG_ZIP) -d -o $@ -C $(zip_root) -r $@.list
.PHONY: target-files-package
target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
+.PHONY: target-files-dir
+target-files-dir: $(BUILT_TARGET_FILES_DIR)
+
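Packaging is now split into two rules: target-files-dir stages the tree and writes the sorted file list (META/ first), and target-files-package zips that list. A sketch of the flow, with names abbreviated:

    # m target-files-dir     -> $(intermediates)/$(name)           staging tree
    #                           $(intermediates)/$(name).zip.list  sorted file list, META/ first
    # m target-files-package -> $(intermediates)/$(name).zip       soong_zip -r over the list
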
$(call declare-1p-container,$(BUILT_TARGET_FILES_PACKAGE),)
$(call declare-container-license-deps,$(BUILT_TARGET_FILES_PACKAGE), $(INSTALLED_RADIOIMAGE_TARGET) \
$(INSTALLED_RECOVERYIMAGE_TARGET) \
@@ -6283,6 +6471,7 @@
$(LPMAKE) \
$(SELINUX_FC) \
$(INSTALLED_MISC_INFO_TARGET) \
+ $(INSTALLED_FASTBOOT_INFO_TARGET) \
$(APKCERTS_FILE) \
$(SOONG_APEX_KEYS_FILE) \
$(HOST_OUT_EXECUTABLES)/fs_config \
@@ -6316,11 +6505,10 @@
PATH=$(INTERNAL_USERIMAGES_BINARY_PATHS):$(dir $(ZIP2ZIP)):$$PATH \
$(OTA_FROM_TARGET_FILES) \
--verbose \
- --extracted_input_target_files $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) \
--path $(HOST_OUT) \
$(if $(OEM_OTA_CONFIG), --oem_settings $(OEM_OTA_CONFIG)) \
$(2) \
- $(BUILT_TARGET_FILES_PACKAGE) $(1)
+ $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE)) $(1)
endef
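
With this change, ota_from_target_files consumes the extracted staging directory directly: $(patsubst %.zip,%,...) strips the .zip suffix to yield the $(intermediates)/$(name) tree, so the separate --extracted_input_target_files flag is no longer needed. Roughly:

    # Before: ota_from_target_files --extracted_input_target_files <dir> ... <name>.zip <out.zip>
    # After:  ota_from_target_files ... <dir> <out.zip>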
product_name := $(TARGET_PRODUCT)
@@ -6336,7 +6524,7 @@
$(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
$(INTERNAL_OTA_PACKAGE_TARGET): .KATI_IMPLICIT_OUTPUTS := $(INTERNAL_OTA_METADATA)
-$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
+$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_DIR) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
@echo "Package OTA: $@"
$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --output_metadata_path $(INTERNAL_OTA_METADATA))
@@ -6372,7 +6560,7 @@
INTERNAL_OTA_PARTIAL_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
-$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
+$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET): $(BUILT_TARGET_FILES_DIR) $(OTA_FROM_TARGET_FILES) $(INTERNAL_OTATOOLS_FILES)
@echo "Package partial OTA: $@"
$(call build-ota-package-target,$@,-k $(KEY_CERT_PAIR) --partial "$(BOARD_PARTIAL_OTA_UPDATE_PARTITIONS_LIST)")
@@ -6695,7 +6883,7 @@
# For real devices and for dist builds, build super image from target files to an intermediate directory.
INTERNAL_SUPERIMAGE_DIST_TARGET := $(call intermediates-dir-for,PACKAGING,super.img)/super.img
$(INTERNAL_SUPERIMAGE_DIST_TARGET): extracted_input_target_files := $(patsubst %.zip,%,$(BUILT_TARGET_FILES_PACKAGE))
-$(INTERNAL_SUPERIMAGE_DIST_TARGET): $(LPMAKE) $(BUILT_TARGET_FILES_PACKAGE) $(BUILD_SUPER_IMAGE)
+$(INTERNAL_SUPERIMAGE_DIST_TARGET): $(LPMAKE) $(BUILT_TARGET_FILES_DIR) $(BUILD_SUPER_IMAGE)
$(call pretty,"Target super fs image from target files: $@")
PATH=$(dir $(LPMAKE)):$$PATH \
$(BUILD_SUPER_IMAGE) -v $(extracted_input_target_files) $@
@@ -7151,17 +7339,9 @@
haiku: $(SOONG_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_FUZZ_TARGETS)
$(call dist-for-goals,haiku,$(SOONG_FUZZ_PACKAGING_ARCH_MODULES))
$(call dist-for-goals,haiku,$(PRODUCT_OUT)/module-info.json)
-
-.PHONY: haiku-java-device
-haiku-java-device: $(SOONG_JAVA_FUZZ_DEVICE_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_DEVICE_TARGETS)
-$(call dist-for-goals,haiku-java-device,$(SOONG_JAVA_FUZZ_DEVICE_PACKAGING_ARCH_MODULES))
-$(call dist-for-goals,haiku-java-device,$(PRODUCT_OUT)/module-info.json)
-
-.PHONY: haiku-java-host
-haiku-java-host: $(SOONG_JAVA_FUZZ_HOST_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_HOST_TARGETS)
-$(call dist-for-goals,haiku-java-host,$(SOONG_JAVA_FUZZ_HOST_PACKAGING_ARCH_MODULES))
-$(call dist-for-goals,haiku-java-host,$(PRODUCT_OUT)/module-info.json)
-
+.PHONY: haiku-java
+haiku-java: $(SOONG_JAVA_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_JAVA_FUZZ_TARGETS)
+$(call dist-for-goals,haiku-java,$(SOONG_JAVA_FUZZ_PACKAGING_ARCH_MODULES))
.PHONY: haiku-rust
haiku-rust: $(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES) $(ALL_RUST_FUZZ_TARGETS)
$(call dist-for-goals,haiku-rust,$(SOONG_RUST_FUZZ_PACKAGING_ARCH_MODULES))
diff --git a/core/all_versions.bzl b/core/all_versions.bzl
new file mode 100644
index 0000000..33da673
--- /dev/null
+++ b/core/all_versions.bzl
@@ -0,0 +1,23 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+_all_versions = ["OPR1", "OPD1", "OPD2", "OPM1", "OPM2", "PPR1", "PPD1", "PPD2", "PPM1", "PPM2", "QPR1"] + [
+ version + subversion
+ for version in ["Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z"]
+ for subversion in ["P1A", "P1B", "P2A", "P2B", "D1A", "D1B", "D2A", "D2B", "Q1A", "Q1B", "Q2A", "Q2B", "Q3A", "Q3B"]
+]
+
+variables_to_export_to_make = {
+ "ALL_VERSIONS": _all_versions,
+}
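
This reproduces the list previously assembled by version-list in core/envsetup.mk: 11 legacy codename entries plus 10 letter prefixes x 14 suffixes = 151 versions in total. A quick sanity check, as a sketch, once envsetup.mk has evaluated the file:

    # after: $(call run-starlark,build/make/core/all_versions.bzl)
    $(warning ALL_VERSIONS has $(words $(ALL_VERSIONS)) entries)  # expect 151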
diff --git a/core/android_soong_config_vars.mk b/core/android_soong_config_vars.mk
index 718adb5..c52fa92 100644
--- a/core/android_soong_config_vars.mk
+++ b/core/android_soong_config_vars.mk
@@ -101,6 +101,9 @@
endif
$(call soong_config_set,art_module,source_build,$(ART_MODULE_BUILD_FROM_SOURCE))
+ifdef ART_DEBUG_OPT_FLAG
+$(call soong_config_set,art_module,art_debug_opt_flag,$(ART_DEBUG_OPT_FLAG))
+endif
ifdef TARGET_BOARD_AUTO
$(call add_soong_config_var_value, ANDROID, target_board_auto, $(TARGET_BOARD_AUTO))
diff --git a/core/base_rules.mk b/core/base_rules.mk
index c453469..65e80fb 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -190,18 +190,6 @@
$(call pretty-error,unusual tags: $(filter-out tests optional samples,$(my_module_tags)))
endif
-# Add implicit tags.
-#
-# If the local directory or one of its parents contains a MODULE_LICENSE_GPL
-# file, tag the module as "gnu". Search for "*_GPL*", "*_LGPL*" and "*_MPL*"
-# so that we can also find files like MODULE_LICENSE_GPL_AND_AFL
-#
-gpl_license_file := $(call find-parent-file,$(LOCAL_PATH),MODULE_LICENSE*_GPL* MODULE_LICENSE*_MPL* MODULE_LICENSE*_LGPL*)
-ifneq ($(gpl_license_file),)
- my_module_tags += gnu
- ALL_GPL_MODULE_LICENSE_FILES += $(gpl_license_file)
-endif
-
LOCAL_MODULE_CLASS := $(strip $(LOCAL_MODULE_CLASS))
ifneq ($(words $(LOCAL_MODULE_CLASS)),1)
$(error $(LOCAL_PATH): LOCAL_MODULE_CLASS must contain exactly one word, not "$(LOCAL_MODULE_CLASS)")
diff --git a/core/board_config.mk b/core/board_config.mk
index fae7aaa..bdfb279 100644
--- a/core/board_config.mk
+++ b/core/board_config.mk
@@ -256,7 +256,7 @@
endif
$(shell build/soong/scripts/update_out $(OUT_DIR)/rbc/rbc_board_config_results.mk \
- $(OUT_DIR)/rbcrun RBC_OUT="make" $(OUT_DIR)/rbc/boardlauncher.rbc)
+ $(OUT_DIR)/rbcrun --mode=rbc $(OUT_DIR)/rbc/boardlauncher.rbc)
ifneq ($(.SHELLSTATUS),0)
$(error board configuration runner failed: $(.SHELLSTATUS))
endif
diff --git a/core/config.mk b/core/config.mk
index 26e90ef..e272389 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -42,6 +42,7 @@
# Mark variables deprecated/obsolete
CHANGES_URL := https://android.googlesource.com/platform/build/+/master/Changes.md
.KATI_READONLY := CHANGES_URL
+$(KATI_deprecated_var TARGET_USES_64_BIT_BINDER,All devices use 64-bit binder by default now. Uses of TARGET_USES_64_BIT_BINDER should be removed.)
$(KATI_obsolete_var PATH,Do not use PATH directly. See $(CHANGES_URL)#PATH)
$(KATI_obsolete_var PYTHONPATH,Do not use PYTHONPATH directly. See $(CHANGES_URL)#PYTHONPATH)
$(KATI_obsolete_var OUT,Use OUT_DIR instead. See $(CHANGES_URL)#OUT)
@@ -270,7 +271,7 @@
# Ex: $(call add_soong_config_namespace,acme)
define add_soong_config_namespace
-$(eval SOONG_CONFIG_NAMESPACES += $1) \
+$(eval SOONG_CONFIG_NAMESPACES += $(strip $1)) \
$(eval SOONG_CONFIG_$(strip $1) :=)
endef
@@ -280,8 +281,8 @@
# $1 is the namespace. $2 is the list of variables.
# Ex: $(call add_soong_config_var,acme,COOL_FEATURE_A COOL_FEATURE_B)
define add_soong_config_var
-$(eval SOONG_CONFIG_$(strip $1) += $2) \
-$(foreach v,$(strip $2),$(eval SOONG_CONFIG_$(strip $1)_$v := $($v)))
+$(eval SOONG_CONFIG_$(strip $1) += $(strip $2)) \
+$(foreach v,$(strip $2),$(eval SOONG_CONFIG_$(strip $1)_$v := $(strip $($v))))
endef
# The add_soong_config_var_value function defines a make variable and also adds
@@ -290,7 +291,7 @@
# Ex: $(call add_soong_config_var_value,acme,COOL_FEATURE,true)
define add_soong_config_var_value
-$(eval $2 := $3) \
+$(eval $(strip $2) := $(strip $3)) \
$(call add_soong_config_var,$1,$2)
endef
@@ -298,8 +299,8 @@
#
# internal utility to define a namespace and a variable in it.
define soong_config_define_internal
-$(if $(filter $1,$(SOONG_CONFIG_NAMESPACES)),,$(eval SOONG_CONFIG_NAMESPACES:=$(SOONG_CONFIG_NAMESPACES) $1)) \
-$(if $(filter $2,$(SOONG_CONFIG_$(strip $1))),,$(eval SOONG_CONFIG_$(strip $1):=$(SOONG_CONFIG_$(strip $1)) $2))
+$(if $(filter $1,$(SOONG_CONFIG_NAMESPACES)),,$(eval SOONG_CONFIG_NAMESPACES:=$(SOONG_CONFIG_NAMESPACES) $(strip $1))) \
+$(if $(filter $2,$(SOONG_CONFIG_$(strip $1))),,$(eval SOONG_CONFIG_$(strip $1):=$(SOONG_CONFIG_$(strip $1)) $(strip $2)))
endef
# soong_config_set defines the variable in the given Soong config namespace
@@ -308,7 +309,7 @@
# Ex: $(call soong_config_set,acme,COOL_FEATURE,true)
define soong_config_set
$(call soong_config_define_internal,$1,$2) \
-$(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$3)
+$(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(strip $3))
endef
# soong_config_append appends to the value of the variable in the given Soong
@@ -317,7 +318,7 @@
# $1 is the namespace, $2 is the variable name, $3 is the value
define soong_config_append
$(call soong_config_define_internal,$1,$2) \
-$(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(SOONG_CONFIG_$(strip $1)_$(strip $2)) $3)
+$(eval SOONG_CONFIG_$(strip $1)_$(strip $2):=$(SOONG_CONFIG_$(strip $1)_$(strip $2)) $(strip $3))
endef
# soong_config_get returns the value of the variable in the given Soong
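
The added $(strip) calls guard against stray whitespace introduced by backslash continuations at call sites; roughly, with namespace and value hypothetical:

    # A call split across continuation lines used to leak a leading space:
    $(call soong_config_set,acme,COOL_FEATURE,\
        true)
    # Before: SOONG_CONFIG_acme_COOL_FEATURE = " true"
    # Now:    SOONG_CONFIG_acme_COOL_FEATURE = "true"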
@@ -358,6 +359,51 @@
# are specific to the user's build configuration.
include $(BUILD_SYSTEM)/envsetup.mk
+# Returns true if this is a low-memory device; otherwise returns false.
+define is-low-mem-device
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_PROPERTY_OVERRIDES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_DEFAULT_PROPERTY_OVERRIDES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_COMPATIBLE_PROPERTY_OVERRIDE)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_COMPATIBLE_PROPERTY)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_SYSTEM_DEFAULT_PROPERTIES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_SYSTEM_EXT_PROPERTIES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_PRODUCT_PROPERTIES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_VENDOR_PROPERTIES)),true,\
+$(if $(findstring ro.config.low_ram=true,$(PRODUCT_ODM_PROPERTIES)),true,false)))))))))
+endef
+
+# Get the board API level.
+board_api_level := $(PLATFORM_SDK_VERSION)
+ifdef BOARD_API_LEVEL
+ board_api_level := $(BOARD_API_LEVEL)
+else ifdef BOARD_SHIPPING_API_LEVEL
+ # Vendors with GRF must define BOARD_SHIPPING_API_LEVEL for the vendor API level.
+ board_api_level := $(BOARD_SHIPPING_API_LEVEL)
+endif
+
+# Calculate the VSR vendor API level.
+vsr_vendor_api_level := $(board_api_level)
+
+ifdef PRODUCT_SHIPPING_API_LEVEL
+ vsr_vendor_api_level := $(call math_min,$(PRODUCT_SHIPPING_API_LEVEL),$(board_api_level))
+endif
+
+# Set TARGET_MAX_PAGE_SIZE_SUPPORTED.
+ifdef PRODUCT_MAX_PAGE_SIZE_SUPPORTED
+ TARGET_MAX_PAGE_SIZE_SUPPORTED := $(PRODUCT_MAX_PAGE_SIZE_SUPPORTED)
+else ifeq ($(strip $(call is-low-mem-device)),true)
+ # Low-memory devices use 4096-byte binary alignment.
+ TARGET_MAX_PAGE_SIZE_SUPPORTED := 4096
+else
+ # The default binary alignment for userspace is 4096.
+ TARGET_MAX_PAGE_SIZE_SUPPORTED := 4096
+ # When VSR vendor API level >= 34, binary alignment will be 65536.
+ ifeq ($(call math_gt_or_eq,$(vsr_vendor_api_level),34),true)
+ TARGET_MAX_PAGE_SIZE_SUPPORTED := 65536
+ endif
+endif
+.KATI_READONLY := TARGET_MAX_PAGE_SIZE_SUPPORTED
+
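
A worked example of the decision above (API levels illustrative):

    # GRF vendor board frozen at API 33, product shipping at API 34:
    BOARD_SHIPPING_API_LEVEL := 33
    PRODUCT_SHIPPING_API_LEVEL := 34
    # board_api_level = 33; vsr_vendor_api_level = min(34, 33) = 33 < 34
    # => TARGET_MAX_PAGE_SIZE_SUPPORTED = 4096
    # With both levels at 34, vsr_vendor_api_level = 34 => 65536
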
# Pruned directory options used when using findleaves.py
# See envsetup.mk for a description of SCAN_EXCLUDE_DIRS
FIND_LEAVES_EXCLUDES := $(addprefix --prune=, $(SCAN_EXCLUDE_DIRS) .repo .git)
@@ -500,8 +546,10 @@
TARGET_BUILD_USE_PREBUILT_SDKS :=
DISABLE_PREOPT :=
+DISABLE_PREOPT_BOOT_IMAGES :=
ifneq (,$(TARGET_BUILD_APPS)$(TARGET_BUILD_UNBUNDLED_IMAGE))
DISABLE_PREOPT := true
+ DISABLE_PREOPT_BOOT_IMAGES := true
endif
ifeq (true,$(TARGET_BUILD_UNBUNDLED))
ifneq (true,$(UNBUNDLED_BUILD_SDKS_FROM_SOURCE))
@@ -512,6 +560,7 @@
.KATI_READONLY := \
TARGET_BUILD_USE_PREBUILT_SDKS \
DISABLE_PREOPT \
+ DISABLE_PREOPT_BOOT_IMAGES \
prebuilt_sdk_tools := prebuilts/sdk/tools
prebuilt_sdk_tools_bin := $(prebuilt_sdk_tools)/$(HOST_OS)/bin
@@ -783,13 +832,6 @@
ifneq ($(call numbers_less_than,$(min_systemsdk_version),$(BOARD_SYSTEMSDK_VERSIONS)),)
$(error BOARD_SYSTEMSDK_VERSIONS ($(BOARD_SYSTEMSDK_VERSIONS)) must all be greater than or equal to BOARD_API_LEVEL, BOARD_SHIPPING_API_LEVEL or PRODUCT_SHIPPING_API_LEVEL ($(min_systemsdk_version)))
endif
- ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),28),)
- ifneq ($(TARGET_IS_64_BIT), true)
- ifneq ($(TARGET_USES_64_BIT_BINDER), true)
- $(error When PRODUCT_SHIPPING_API_LEVEL >= 28, TARGET_USES_64_BIT_BINDER must be true)
- endif
- endif
- endif
ifneq ($(call math_gt_or_eq,$(PRODUCT_SHIPPING_API_LEVEL),29),)
ifneq ($(BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE),)
$(error When PRODUCT_SHIPPING_API_LEVEL >= 29, BOARD_OTA_FRAMEWORK_VBMETA_VERSION_OVERRIDE cannot be set)
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 35c632c..0e84f516 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -155,6 +155,17 @@
endif
endif
+# Enable HWASan in included paths.
+ifeq ($(filter hwaddress, $(my_sanitize)),)
+ combined_include_paths := $(HWASAN_INCLUDE_PATHS) \
+ $(PRODUCT_HWASAN_INCLUDE_PATHS)
+
+ ifneq ($(strip $(foreach dir,$(subst $(comma),$(space),$(combined_include_paths)),\
+ $(filter $(dir)%,$(LOCAL_PATH)))),)
+ my_sanitize := hwaddress $(my_sanitize)
+ endif
+endif
+
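
Opting a subtree into HWASan is then a one-line product setting (path hypothetical):

    PRODUCT_HWASAN_INCLUDE_PATHS := vendor/acme/camera
    # Every module whose LOCAL_PATH falls under vendor/acme/camera now gets
    # hwaddress prepended to my_sanitize, unless it already requested it.
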
# If CFI is disabled globally, remove it from my_sanitize.
ifeq ($(strip $(ENABLE_CFI)),false)
my_sanitize := $(filter-out cfi,$(my_sanitize))
diff --git a/core/definitions.mk b/core/definitions.mk
index ce1248e..7697211 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -75,9 +75,6 @@
# All findbugs xml files
ALL_FINDBUGS_FILES:=
-# GPL module license files
-ALL_GPL_MODULE_LICENSE_FILES:=
-
# Packages with certificate violation
CERTIFICATE_VIOLATION_MODULES :=
@@ -597,7 +594,7 @@
define declare-copy-target-license-metadata
$(strip $(if $(filter $(OUT_DIR)%,$(2)),\
$(eval _tgt:=$(strip $(1)))\
- $(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2)))\
+ $(eval ALL_COPIED_TARGETS.$(_tgt).SOURCES := $(sort $(ALL_COPIED_TARGETS.$(_tgt).SOURCES) $(filter $(OUT_DIR)%,$(2))))\
$(eval ALL_COPIED_TARGETS += $(_tgt))))
endef
@@ -897,7 +894,8 @@
endef
###########################################################
-## Declare license dependencies $(2) for non-module target $(1)
+## Declare license dependencies $(2) with optional colon-separated
+## annotations for non-module target $(1)
###########################################################
define declare-license-deps
$(strip \
@@ -909,7 +907,8 @@
endef
###########################################################
-## Declare license dependencies $(2) for non-module container-type target $(1)
+## Declare license dependencies $(2) with optional colon-separated
+## annotations for non-module container-type target $(1)
##
## Container-type targets are targets like .zip files that
## merely aggregate other files.
@@ -2960,7 +2959,7 @@
$(extract-package) \
echo "Module name in Android tree: $(PRIVATE_MODULE)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
echo "Local path in Android tree: $(PRIVATE_PATH)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
- echo "Install path on $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT): $(PRIVATE_INSTALLED_MODULE)" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
+ echo "Install path: $(patsubst $(PRODUCT_OUT)/%,%,$(PRIVATE_INSTALLED_MODULE))" >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log && \
echo >> $(PRODUCT_OUT)/appcompat/$(PRIVATE_MODULE).log
endef
ART_VERIDEX_APPCOMPAT_SCRIPT:=$(HOST_OUT)/bin/appcompat.sh
diff --git a/core/dex_preopt_config.mk b/core/dex_preopt_config.mk
index e36e2eb..7b9c4db 100644
--- a/core/dex_preopt_config.mk
+++ b/core/dex_preopt_config.mk
@@ -12,9 +12,15 @@
# would result in passing bad arguments to dex2oat and failing the build.
ENABLE_PREOPT :=
ENABLE_PREOPT_BOOT_IMAGES :=
-else ifeq (true,$(DISABLE_PREOPT))
- # Disable dexpreopt for libraries/apps, but do compile boot images.
- ENABLE_PREOPT :=
+else
+ ifeq (true,$(DISABLE_PREOPT))
+ # Disable dexpreopt for libraries/apps; boot images may still be compiled.
+ ENABLE_PREOPT :=
+ endif
+ ifeq (true,$(DISABLE_PREOPT_BOOT_IMAGES))
+ # Disable dexpreopt for boot images; libraries/apps may still be compiled.
+ ENABLE_PREOPT_BOOT_IMAGES :=
+ endif
endif
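
The two knobs can now be cleared independently; config.mk (above) sets both for TARGET_BUILD_APPS and unbundled-image builds. The resulting combinations, as a sketch:

    # DISABLE_PREOPT  DISABLE_PREOPT_BOOT_IMAGES   apps dexpreopted  boot images dexpreopted
    # (unset)         (unset)                      yes               yes
    # true            (unset)                      no                yes
    # true            true                         no                no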
# The default value for LOCAL_DEX_PREOPT
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index d498875..7165bea 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -84,12 +84,13 @@
ifndef LOCAL_DEX_PREOPT_GENERATE_PROFILE
# If LOCAL_DEX_PREOPT_GENERATE_PROFILE is not defined, default it based on the existence of the
# profile class listing. TODO: Use product specific directory here.
- my_classes_directory := $(PRODUCT_DEX_PREOPT_PROFILE_DIR)
- LOCAL_DEX_PREOPT_PROFILE := $(my_classes_directory)/$(LOCAL_MODULE).prof
+ ifdef PRODUCT_DEX_PREOPT_PROFILE_DIR
+ LOCAL_DEX_PREOPT_PROFILE := $(PRODUCT_DEX_PREOPT_PROFILE_DIR)/$(LOCAL_MODULE).prof
- ifneq (,$(wildcard $(LOCAL_DEX_PREOPT_PROFILE)))
- my_process_profile := true
- my_profile_is_text_listing :=
+ ifneq (,$(wildcard $(LOCAL_DEX_PREOPT_PROFILE)))
+ my_process_profile := true
+ my_profile_is_text_listing :=
+ endif
endif
else
my_process_profile := $(LOCAL_DEX_PREOPT_GENERATE_PROFILE)
@@ -240,7 +241,7 @@
--enforce-uses-libraries-relax,)
my_dexpreopt_config_args := $(patsubst %,--dexpreopt-config %,$(my_dexpreopt_dep_configs))
- my_enforced_uses_libraries := $(intermediates.COMMON)/enforce_uses_libraries.status
+ my_enforced_uses_libraries := $(intermediates)/enforce_uses_libraries.status
$(my_enforced_uses_libraries): PRIVATE_USES_LIBRARIES := $(my_uses_libs_args)
$(my_enforced_uses_libraries): PRIVATE_OPTIONAL_USES_LIBRARIES := $(my_optional_uses_libs_args)
$(my_enforced_uses_libraries): PRIVATE_DEXPREOPT_CONFIGS := $(my_dexpreopt_config_args)
@@ -473,7 +474,7 @@
my_dexpreopt_deps += $(my_dexpreopt_images_deps)
my_dexpreopt_deps += $(DEXPREOPT_BOOTCLASSPATH_DEX_FILES)
ifeq ($(LOCAL_ENFORCE_USES_LIBRARIES),true)
- my_dexpreopt_deps += $(intermediates.COMMON)/enforce_uses_libraries.status
+ my_dexpreopt_deps += $(intermediates)/enforce_uses_libraries.status
endif
$(my_dexpreopt_zip): PRIVATE_MODULE := $(LOCAL_MODULE)
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 7dd9b12..8887ddc 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -24,14 +24,30 @@
#$(warning $(call find_and_earlier,A B C,C))
#$(warning $(call find_and_earlier,A B C,D))
-define version-list
-$(1)P1A $(1)P1B $(1)P2A $(1)P2B $(1)D1A $(1)D1B $(1)D2A $(1)D2B $(1)Q1A $(1)Q1B $(1)Q2A $(1)Q2B $(1)Q3A $(1)Q3B
+# Runs the starlark file given in $(1), and sets all the variables in its top-level
+# variables_to_export_to_make variable as make variables.
+#
+# In order to avoid running starlark every time the stamp file is checked, we use
+# $(KATI_shell_no_rerun). Then, to make sure that we actually do rerun kati when
+# modifying the starlark files, we add the starlark files to the kati stamp file with
+# $(KATI_extra_file_deps).
+define run-starlark
+$(eval _starlark_results := $(OUT_DIR)/starlark_results/$(subst /,_,$(1)).mk)
+$(KATI_shell_no_rerun mkdir -p $(OUT_DIR)/starlark_results && $(OUT_DIR)/rbcrun --mode=make $(1) >$(_starlark_results) && touch -t 200001010000 $(_starlark_results))
+$(if $(filter-out 0,$(.SHELLSTATUS)),$(error Starlark failed to run))
+$(eval include $(_starlark_results))
+$(KATI_extra_file_deps $(LOADED_STARLARK_FILES))
+$(eval LOADED_STARLARK_FILES :=)
+$(eval _starlark_results :=)
endef
-PREV_VERSIONS := OPR1 OPD1 OPD2 OPM1 OPM2 PPR1 PPD1 PPD2 PPM1 PPM2 QPR1
-ALL_VERSIONS := Q R S T U V W X Y Z
-ALL_VERSIONS := $(PREV_VERSIONS) $(foreach v,$(ALL_VERSIONS),$(call version-list,$(v)))
-PREV_VERSIONS :=
+# ---------------------------------------------------------------
+# Release config
+include $(BUILD_SYSTEM)/release_config.mk
+
+# ---------------------------------------------------------------
+# defines ALL_VERSIONS
+$(call run-starlark,build/make/core/all_versions.bzl)
# Filters ALL_VERSIONS down to the range [$1, $2], and errors if $1 > $2 or $3 is
# not in [$1, $2]
@@ -339,6 +355,7 @@
RBC_PRODUCT_CONFIG \
RBC_BOARD_CONFIG \
SOONG_% \
+ TARGET_RELEASE \
TOPDIR \
TRACE_BEGIN_SOONG \
USER)
@@ -553,6 +570,8 @@
TARGET_OUT_NOTICE_FILES := $(TARGET_OUT_INTERMEDIATES)/NOTICE_FILES
TARGET_OUT_FAKE := $(PRODUCT_OUT)/fake_packages
TARGET_OUT_TESTCASES := $(PRODUCT_OUT)/testcases
+TARGET_OUT_FLAGS := $(TARGET_OUT_INTERMEDIATES)/FLAGS
+
.KATI_READONLY := \
TARGET_OUT_EXECUTABLES \
TARGET_OUT_OPTIONAL_EXECUTABLES \
@@ -566,7 +585,8 @@
TARGET_OUT_ETC \
TARGET_OUT_NOTICE_FILES \
TARGET_OUT_FAKE \
- TARGET_OUT_TESTCASES
+ TARGET_OUT_TESTCASES \
+ TARGET_OUT_FLAGS
ifeq ($(SANITIZE_LITE),true)
# When using SANITIZE_LITE, APKs must not be packaged with sanitized libraries, as they will not
diff --git a/core/generate_enforce_rro.mk b/core/generate_enforce_rro.mk
index 9079981..e149ef4 100644
--- a/core/generate_enforce_rro.mk
+++ b/core/generate_enforce_rro.mk
@@ -1,6 +1,6 @@
include $(CLEAR_VARS)
-enforce_rro_module := $(enforce_rro_source_module)__auto_generated_rro_$(enforce_rro_partition)
+enforce_rro_module := $(enforce_rro_source_module)__$(PRODUCT_NAME)__auto_generated_rro_$(enforce_rro_partition)
LOCAL_PACKAGE_NAME := $(enforce_rro_module)
intermediates := $(call intermediates-dir-for,APPS,$(LOCAL_PACKAGE_NAME),,COMMON)
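
The generated overlay name now embeds the product, keeping per-product RRO variants distinct when several products build in one tree; for example (product name illustrative):

    # enforce_rro_source_module = framework-res, PRODUCT_NAME = aosp_arm64, vendor partition:
    #   before: framework-res__auto_generated_rro_vendor
    #   after:  framework-res__aosp_arm64__auto_generated_rro_vendor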
diff --git a/core/main.mk b/core/main.mk
index a738dd9..a747967 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -347,6 +347,10 @@
ADDITIONAL_PRODUCT_PROPERTIES += ro.product.ab_ota_partitions=$(subst $(space),$(comma),$(sort $(AB_OTA_PARTITIONS)))
endif
+# Set this property for VTS to skip large page size tests on unsupported devices.
+ADDITIONAL_PRODUCT_PROPERTIES += \
+ ro.product.cpu.pagesize.max=$(TARGET_MAX_PAGE_SIZE_SUPPORTED)
+
# -----------------------------------------------------------------
###
### In this section we set up the things that are different
@@ -1252,6 +1256,7 @@
$(if $(filter tests,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_TESTS)) \
$(if $(filter asan,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_ASAN)) \
$(if $(filter java_coverage,$(tags_to_install)),$(call get-product-var,$(1),PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE)) \
+ $(if $(filter arm64,$(TARGET_ARCH) $(TARGET_2ND_ARCH)),$(call get-product-var,$(1),PRODUCT_PACKAGES_ARM64)) \
$(call auto-included-modules) \
) \
$(eval ### Filter out the overridden packages and executables before doing expansion) \
@@ -1385,29 +1390,6 @@
$(CUSTOM_MODULES) \
)
-ifdef FULL_BUILD
-#
-# Used by the cleanup logic in soong_ui to remove files that should no longer
-# be installed.
-#
-
-# Include all tests, so that we remove them from the test suites / testcase
-# folders when they are removed.
-test_files := $(foreach ts,$(ALL_COMPATIBILITY_SUITES),$(COMPATIBILITY.$(ts).FILES))
-
-$(shell mkdir -p $(PRODUCT_OUT) $(HOST_OUT))
-
-$(file >$(PRODUCT_OUT)/.installable_files$(if $(filter address,$(SANITIZE_TARGET)),_asan), \
- $(sort $(patsubst $(PRODUCT_OUT)/%,%,$(filter $(PRODUCT_OUT)/%, \
- $(modules_to_install) $(test_files)))))
-
-$(file >$(HOST_OUT)/.installable_test_files,$(sort \
- $(patsubst $(HOST_OUT)/%,%,$(filter $(HOST_OUT)/%, \
- $(test_files)))))
-
-test_files :=
-endif
-
# Deduplicate compatibility suite dist files across modules and packages before
# copying them to their requested locations. Assign the eval result to an unused
# var to prevent Make from trying to make sense of it.
@@ -1466,6 +1448,28 @@
modules_to_install := $(sort $(ALL_DEFAULT_INSTALLED_MODULES))
ALL_DEFAULT_INSTALLED_MODULES :=
+ifdef FULL_BUILD
+#
+# Used by the cleanup logic in soong_ui to remove files that should no longer
+# be installed.
+#
+
+# Include all tests, so that we remove them from the test suites / testcase
+# folders when they are removed.
+test_files := $(foreach ts,$(ALL_COMPATIBILITY_SUITES),$(COMPATIBILITY.$(ts).FILES))
+
+$(shell mkdir -p $(PRODUCT_OUT) $(HOST_OUT))
+
+$(file >$(PRODUCT_OUT)/.installable_files$(if $(filter address,$(SANITIZE_TARGET)),_asan), \
+ $(sort $(patsubst $(PRODUCT_OUT)/%,%,$(filter $(PRODUCT_OUT)/%, \
+ $(modules_to_install) $(test_files)))))
+
+$(file >$(HOST_OUT)/.installable_test_files,$(sort \
+ $(patsubst $(HOST_OUT)/%,%,$(filter $(HOST_OUT)/%, \
+ $(test_files)))))
+
+test_files :=
+endif
# Some notice deps refer to module names without prefix or arch suffix where
# only the variants with them get built.
@@ -1806,6 +1810,8 @@
$(INTERNAL_OTA_PACKAGE_TARGET) \
$(INTERNAL_OTA_METADATA) \
$(INTERNAL_OTA_PARTIAL_PACKAGE_TARGET) \
+ $(BUILT_RAMDISK_16K_TARGET) \
+ $(BUILT_KERNEL_16K_TARGET) \
$(INTERNAL_OTA_RETROFIT_DYNAMIC_PARTITIONS_PACKAGE_TARGET) \
$(SYMBOLS_ZIP) \
$(SYMBOLS_MAPPING) \
@@ -2159,10 +2165,11 @@
$(shell rm $(PRODUCT_OUT)/sbom-metadata.csv >/dev/null 2>&1)
$(PRODUCT_OUT)/sbom-metadata.csv: $(installed_files)
rm -f $@
- @echo installed_file$(comma)module_path$(comma)soong_module_type$(comma)is_prebuilt_make_module$(comma)product_copy_files$(comma)kernel_module_copy_files$(comma)is_platform_generated >> $@
+ @echo installed_file$(comma)module_path$(comma)soong_module_type$(comma)is_prebuilt_make_module$(comma)product_copy_files$(comma)kernel_module_copy_files$(comma)is_platform_generated$(comma)build_output_path >> $@
$(foreach f,$(installed_files),\
$(eval _module_name := $(ALL_INSTALLED_FILES.$f)) \
$(eval _path_on_device := $(patsubst $(PRODUCT_OUT)/%,%,$f)) \
+ $(eval _build_output_path := $(PRODUCT_OUT)/$(_path_on_device)) \
$(eval _module_path := $(strip $(sort $(ALL_MODULES.$(_module_name).PATH)))) \
$(eval _soong_module_type := $(strip $(sort $(ALL_MODULES.$(_module_name).SOONG_MODULE_TYPE)))) \
$(eval _is_prebuilt_make_module := $(ALL_MODULES.$(_module_name).IS_PREBUILT_MAKE_MODULE)) \
@@ -2180,9 +2187,9 @@
$(eval _is_linker_config := $(if $(findstring $f,$(SYSTEM_LINKER_CONFIG) $(vendor_linker_config_file)),Y)) \
$(eval _is_partition_compat_symlink := $(if $(findstring $f,$(PARTITION_COMPAT_SYMLINKS)),Y)) \
$(eval _is_platform_generated := $(_is_build_prop)$(_is_notice_file)$(_is_dexpreopt_image_profile)$(_is_product_system_other_avbkey)$(_is_event_log_tags_file)$(_is_system_other_odex_marker)$(_is_kernel_modules_blocklist)$(_is_fsverity_build_manifest_apk)$(_is_linker_config)$(_is_partition_compat_symlink)) \
- @echo /$(_path_on_device)$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated) >> $@ $(newline) \
+ @echo /$(_path_on_device)$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated)$(comma)$(_build_output_path) >> $@ $(newline) \
$(if $(_post_installed_dexpreopt_zip), \
- for i in $$(zipinfo -1 $(_post_installed_dexpreopt_zip)); do echo /$$i$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated) >> $@ ; done $(newline) \
+ for i in $$(zipinfo -1 $(_post_installed_dexpreopt_zip)); do echo /$$i$(comma)$(_module_path)$(comma)$(_soong_module_type)$(comma)$(_is_prebuilt_make_module)$(comma)$(_product_copy_files)$(comma)$(_kernel_module_copy_files)$(comma)$(_is_platform_generated)$(comma)$(PRODUCT_OUT)/$$i >> $@ ; done $(newline) \
) \
)
@@ -2192,16 +2199,23 @@
$(PRODUCT_OUT)/sbom.spdx.json: $(PRODUCT_OUT)/sbom.spdx
$(PRODUCT_OUT)/sbom.spdx: $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
rm -rf $@
- $(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --product_out_dir=$(PRODUCT_OUT) --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr="$(PRODUCT_MANUFACTURER)" --json
+ $(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --json
-$(call dist-for-goals,droid,$(PRODUCT_OUT)/sbom.spdx.json)
+$(call dist-for-goals,droid,$(PRODUCT_OUT)/sbom.spdx.json:sbom/sbom.spdx.json)
else
-apps_only_sbom_files := $(sort $(patsubst %,%.spdx,$(apps_only_installed_files)))
+apps_only_sbom_files := $(sort $(patsubst %,%.spdx.json,$(filter %.apk,$(apps_only_installed_files))))
$(apps_only_sbom_files): $(PRODUCT_OUT)/sbom-metadata.csv $(GEN_SBOM)
rm -rf $@
- $(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --product_out_dir=$(PRODUCT_OUT) --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr="$(PRODUCT_MANUFACTURER)" --unbundled
+ $(GEN_SBOM) --output_file $@ --metadata $(PRODUCT_OUT)/sbom-metadata.csv --build_version $(BUILD_FINGERPRINT_FROM_FILE) --product_mfr "$(PRODUCT_MANUFACTURER)" --unbundled_apk
sbom: $(apps_only_sbom_files)
+
+$(foreach f,$(apps_only_sbom_files),$(eval $(patsubst %.spdx.json,%-fragment.spdx,$f): $f))
+apps_only_fragment_files := $(patsubst %.spdx.json,%-fragment.spdx,$(apps_only_sbom_files))
+$(foreach f,$(apps_only_fragment_files),$(eval apps_only_fragment_dist_files += :sbom/$(notdir $f)))
+
+$(foreach f,$(apps_only_sbom_files),$(eval apps_only_sbom_dist_files += :sbom/$(notdir $f)))
+$(call dist-for-goals,apps_only,$(join $(apps_only_sbom_files),$(apps_only_sbom_dist_files)) $(join $(apps_only_fragment_files),$(apps_only_fragment_dist_files)))
endif
$(call dist-write-file,$(KATI_PACKAGE_MK_DIR)/dist.mk)
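
dist-for-goals accepts src:dest pairs, and the joins above build one pair per APK; for a single app Foo (install path illustrative):

    # apps_only_sbom_files      = $(PRODUCT_OUT)/system/app/Foo/Foo.apk.spdx.json
    # apps_only_sbom_dist_files = :sbom/Foo.apk.spdx.json
    # join =>                     $(PRODUCT_OUT)/system/app/Foo/Foo.apk.spdx.json:sbom/Foo.apk.spdx.json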
diff --git a/core/package_internal.mk b/core/package_internal.mk
index c7a173b..7cfab5b 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -111,24 +111,26 @@
# Determine whether auto-RRO is enabled for this package.
enforce_rro_enabled :=
-ifneq (,$(filter *, $(PRODUCT_ENFORCE_RRO_TARGETS)))
- # * means all system and system_ext APKs, so enable conditionally based on module path.
+ifeq (,$(filter tests,$(LOCAL_MODULE_TAGS)))
+ ifneq (,$(filter *, $(PRODUCT_ENFORCE_RRO_TARGETS)))
+ # * means all system and system_ext APKs, so enable conditionally based on module path.
- # Note that base_rules.mk has not yet been included, so it's likely that only
- # one of LOCAL_MODULE_PATH and the LOCAL_X_MODULE flags has been set.
- ifeq (,$(LOCAL_MODULE_PATH))
- non_rro_target_module := $(filter true,\
- $(LOCAL_ODM_MODULE) \
- $(LOCAL_OEM_MODULE) \
- $(LOCAL_PRODUCT_MODULE) \
- $(LOCAL_PROPRIETARY_MODULE) \
- $(LOCAL_VENDOR_MODULE))
- enforce_rro_enabled := $(if $(non_rro_target_module),,true)
- else ifneq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
+ # Note that base_rules.mk has not yet been included, so it's likely that only
+ # one of LOCAL_MODULE_PATH and the LOCAL_X_MODULE flags has been set.
+ ifeq (,$(LOCAL_MODULE_PATH))
+ non_rro_target_module := $(filter true,\
+ $(LOCAL_ODM_MODULE) \
+ $(LOCAL_OEM_MODULE) \
+ $(LOCAL_PRODUCT_MODULE) \
+ $(LOCAL_PROPRIETARY_MODULE) \
+ $(LOCAL_VENDOR_MODULE))
+ enforce_rro_enabled := $(if $(non_rro_target_module),,true)
+ else ifneq ($(filter $(TARGET_OUT)/%,$(LOCAL_MODULE_PATH)),)
+ enforce_rro_enabled := true
+ endif
+ else ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_TARGETS)))
enforce_rro_enabled := true
endif
-else ifneq (,$(filter $(LOCAL_PACKAGE_NAME), $(PRODUCT_ENFORCE_RRO_TARGETS)))
- enforce_rro_enabled := true
endif
product_package_overlays := $(strip \
@@ -201,10 +203,10 @@
all_resources := $(strip $(my_res_resources) $(my_overlay_resources))
# The linked resource package.
-my_res_package := $(intermediates)/package-res.apk
+my_res_package := $(intermediates.COMMON)/package-res.apk
LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
-my_bundle_module := $(intermediates)/base.zip
+my_bundle_module := $(intermediates.COMMON)/base.zip
LOCAL_INTERMEDIATE_TARGETS += $(my_bundle_module)
# Always run aapt2, because we need to at least compile the AndroidManifest.xml.
@@ -570,7 +572,7 @@
$(compress-package)
endif # LOCAL_COMPRESSED_MODULE
-my_package_res_pb := $(intermediates)/package-res.pb.apk
+my_package_res_pb := $(intermediates.COMMON)/package-res.pb.apk
$(my_package_res_pb): $(my_res_package) $(AAPT2)
$(AAPT2) convert --output-format proto $< -o $@
diff --git a/core/product.mk b/core/product.mk
index 5f1e145..818aac2 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -27,7 +27,13 @@
_product_single_value_vars += PRODUCT_NAME_FOR_ATTESTATION
_product_single_value_vars += PRODUCT_MODEL_FOR_ATTESTATION
-# The resoure configuration options to use for this product.
+# Defines the ELF segment alignment for binaries (executables and shared libraries).
+# The ELF segment alignment has to be a PAGE_SIZE multiple. For example, if
+# PRODUCT_MAX_PAGE_SIZE_SUPPORTED=65536, the possible values for PAGE_SIZE could be
+# 4096, 16384 and 65536.
+_product_single_value_vars += PRODUCT_MAX_PAGE_SIZE_SUPPORTED
+
+# The resource configuration options to use for this product.
_product_list_vars += PRODUCT_LOCALES
_product_list_vars += PRODUCT_AAPT_CONFIG
_product_single_value_vars += PRODUCT_AAPT_PREF_CONFIG
@@ -36,11 +42,11 @@
_product_list_vars += PRODUCT_PACKAGES
_product_list_vars += PRODUCT_PACKAGES_DEBUG
_product_list_vars += PRODUCT_PACKAGES_DEBUG_ASAN
+_product_list_vars += PRODUCT_PACKAGES_ARM64
# Packages included only for eng/userdebug builds, when building with EMMA_INSTRUMENT=true
_product_list_vars += PRODUCT_PACKAGES_DEBUG_JAVA_COVERAGE
_product_list_vars += PRODUCT_PACKAGES_ENG
_product_list_vars += PRODUCT_PACKAGES_TESTS
-_product_list_vars += PRODUCT_AFDO_PROFILES
# The device that this product maps to.
_product_single_value_vars += PRODUCT_DEVICE
@@ -238,6 +244,9 @@
# Whether any paths are excluded from sanitization when SANITIZE_TARGET=cfi
_product_list_vars += PRODUCT_CFI_EXCLUDE_PATHS
+# Paths whose components should have HWASan enabled
+_product_list_vars += PRODUCT_HWASAN_INCLUDE_PATHS
+
# Whether the Scudo hardened allocator is disabled platform-wide
_product_single_value_vars += PRODUCT_DISABLE_SCUDO
@@ -395,6 +404,12 @@
# supports it
_product_single_value_vars += PRODUCT_ENABLE_UFFD_GC
+# Specifies the COW version to be used by update_engine and libsnapshot. If this
+# value is not specified, update_engine defaults to COW version 2 for backwards compatibility.
+_product_single_value_vars += PRODUCT_VIRTUAL_AB_COW_VERSION
+
+_product_list_vars += PRODUCT_AFDO_PROFILES
+
.KATI_READONLY := _product_single_value_vars _product_list_vars
_product_var_list :=$= $(_product_single_value_vars) $(_product_list_vars)
diff --git a/core/product_config.mk b/core/product_config.mk
index db8ca01..01ad030 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -74,7 +74,7 @@
###########################################################
define find-copy-subdir-files
-$(sort $(shell find $(2) -name "$(1)" -type f | $(SED_EXTENDED) "s:($(2)/?(.*)):\\1\\:$(3)/\\2:" | sed "s://:/:g"))
+$(shell find $(2) -name "$(1)" -type f | $(SED_EXTENDED) "s:($(2)/?(.*)):\\1\\:$(3)/\\2:" | sed "s://:/:g" | sort)
endef
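#
# For illustration (the paths are hypothetical):
#   $(call find-copy-subdir-files,*.xml,device/acme/configs,$(TARGET_COPY_OUT_VENDOR)/etc)
# expands to source:dest copy pairs such as
#   device/acme/configs/foo.xml:vendor/etc/foo.xml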
#
@@ -223,7 +223,7 @@
endif
ifeq (,$(current_product_makefile))
- $(error Can not locate config makefile for product "$(TARGET_PRODUCT)")
+ $(error Cannot locate config makefile for product "$(TARGET_PRODUCT)")
endif
ifneq (,$(filter $(TARGET_PRODUCT),$(products_using_starlark_config)))
@@ -236,14 +236,22 @@
$(shell mkdir -p $(OUT_DIR)/rbc)
$(call dump-variables-rbc, $(OUT_DIR)/rbc/make_vars_pre_product_config.mk)
- $(shell build/soong/scripts/update_out \
- $(OUT_DIR)/rbc/rbc_product_config_results.mk \
- build/soong/scripts/rbc-run \
- $(current_product_makefile) \
- $(OUT_DIR)/rbc/make_vars_pre_product_config.mk)
+ $(shell $(OUT_DIR)/mk2rbc \
+ --mode=write -r --outdir $(OUT_DIR)/rbc \
+ --launcher=$(OUT_DIR)/rbc/launcher.rbc \
+ --input_variables=$(OUT_DIR)/rbc/make_vars_pre_product_config.mk \
+ --makefile_list=$(OUT_DIR)/.module_paths/configuration.list \
+ $(current_product_makefile))
ifneq ($(.SHELLSTATUS),0)
$(error product configuration converter failed: $(.SHELLSTATUS))
endif
+
+ $(shell build/soong/scripts/update_out $(OUT_DIR)/rbc/rbc_product_config_results.mk \
+ $(OUT_DIR)/rbcrun --mode=rbc $(OUT_DIR)/rbc/launcher.rbc)
+ ifneq ($(.SHELLSTATUS),0)
+ $(error product configuration runner failed: $(.SHELLSTATUS))
+ endif
+
include $(OUT_DIR)/rbc/rbc_product_config_results.mk
endif
@@ -517,7 +525,8 @@
PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE := $(OVERRIDE_PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE)
endif
else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
- # No shipping level defined
+ # No shipping level defined. Enforce the product interface by default.
+ PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE := true
else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),29),true)
# Enforce product interface if PRODUCT_SHIPPING_API_LEVEL is greater than 29.
PRODUCT_ENFORCE_PRODUCT_PARTITION_INTERFACE := true
@@ -532,7 +541,8 @@
ifneq ($(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE),)
PRODUCT_USE_PRODUCT_VNDK := $(PRODUCT_USE_PRODUCT_VNDK_OVERRIDE)
else ifeq ($(PRODUCT_SHIPPING_API_LEVEL),)
- # No shipping level defined
+ # No shipping level defined. Use the product VNDK by default.
+ PRODUCT_USE_PRODUCT_VNDK := true
else ifeq ($(call math_gt,$(PRODUCT_SHIPPING_API_LEVEL),29),true)
# Enforce product interface for VNDK if PRODUCT_SHIPPING_API_LEVEL is greater
# than 29.
diff --git a/core/product_config.rbc b/core/product_config.rbc
index 97c1d00..921f068 100644
--- a/core/product_config.rbc
+++ b/core/product_config.rbc
@@ -54,25 +54,16 @@
if value == None:
return
if type(value) == "list":
- if _options.rearrange:
- value = __printvars_rearrange_list(value)
- if _options.format == "pretty":
- print(attr, "=", repr(value))
- elif _options.format == "make":
- value = list(value)
- for i, x in enumerate(value):
- if type(x) == "tuple" and len(x) == 1:
- value[i] = "@inherit:" + x[0] + ".mk"
- elif type(x) != "string":
- fail("Wasn't a list of strings:", attr, " value:", value)
- print(attr, ":=", " ".join(value))
- elif _options.format == "pretty":
- print(attr, "=", repr(value))
- elif _options.format == "make":
+ value = list(value)
+ for i, x in enumerate(value):
+ if type(x) == "tuple" and len(x) == 1:
+ value[i] = "@inherit:" + x[0] + ".mk"
+ elif type(x) != "string":
+ fail("Wasn't a list of strings:", attr, " value:", value)
+ print(attr, ":=", " ".join(value))
+ else:
# Trim all spacing to a single space
print(attr, ":=", _mkstrip(value))
- else:
- fail("bad output format", _options.format)
def _printvars(state):
"""Prints configuration and global variables."""
@@ -83,8 +74,7 @@
for nsname, nsvars in sorted(val.items()):
# Define SOONG_CONFIG_<ns> for Make, otherwise
# it cannot be added to .KATI_READONLY list
- if _options.format == "make":
- print("SOONG_CONFIG_" + nsname, ":=", " ".join(nsvars.keys()))
+ print("SOONG_CONFIG_" + nsname, ":=", " ".join(nsvars.keys()))
for var, val in sorted(nsvars.items()):
if val:
__print_attr("SOONG_CONFIG_%s_%s" % (nsname, var), val)
@@ -105,11 +95,6 @@
elif attr not in globals_base or globals_base[attr] != val:
__print_attr(attr, val)
-def __printvars_rearrange_list(value_list):
- """Rearrange value list: return only distinct elements, maybe sorted."""
- seen = {item: 0 for item in value_list}
- return sorted(seen.keys()) if _options.rearrange == "sort" else seen.keys()
-
def __sort_pcm_names(pcm_names):
# We have to add an extension back onto the pcm names when sorting,
# or else the sort order could be wrong when one is a prefix of another.
@@ -394,7 +379,7 @@
def _soong_config_set(g, nsname, var, value):
"""Assigns the value to the variable in the namespace."""
_soong_config_namespace(g, nsname)
- g[_soong_config_namespaces_key][nsname][var]=value
+ g[_soong_config_namespaces_key][nsname][var]=_mkstrip(value)
def _soong_config_append(g, nsname, var, value):
"""Appends to the value of the variable in the namespace."""
@@ -402,9 +387,9 @@
ns = g[_soong_config_namespaces_key][nsname]
oldv = ns.get(var)
if oldv == None:
- ns[var] = value
+ ns[var] = _mkstrip(value)
else:
- ns[var] += " " + value
+ ns[var] += " " + _mkstrip(value)
def _soong_config_get(g, nsname, var):
@@ -691,16 +676,8 @@
rblf_log(file, "warning", message, sep = ':')
def _mk2rbc_error(loc, message):
- """Prints a message about conversion error and stops.
-
- If RBC_MK2RBC_CONTINUE environment variable is set,
- the execution will continue after the message is printed.
- """
- if _options.mk2rbc_continue:
- rblf_log(loc, message, sep = ':')
- else:
- _mkerror(loc, message)
-
+ """Prints a message about conversion error and stops."""
+ _mkerror(loc, message)
def _mkinfo(file, message = ""):
"""Prints info."""
@@ -873,39 +850,12 @@
# Cause the variable to appear set like the make version does
g[v] = ""
-
-def __get_options():
- """Returns struct containing runtime global settings."""
- settings = dict(
- format = "pretty",
- rearrange = "",
- trace_modules = False,
- trace_variables = [],
- mk2rbc_continue = False,
- )
- for x in getattr(rblf_cli, "RBC_OUT", "").split(","):
- if x == "sort" or x == "unique":
- if settings["rearrange"]:
- fail("RBC_OUT: either sort or unique is allowed (and sort implies unique)")
- settings["rearrange"] = x
- elif x == "pretty" or x == "make":
- settings["format"] = x
- elif x == "global":
- # TODO: Remove this, kept for backwards compatibility
- pass
- elif x != "":
- fail("RBC_OUT: got %s, should be one of: [pretty|make] [sort|unique]" % x)
- for x in getattr(rblf_cli, "RBC_DEBUG", "").split(","):
- if x == "!trace":
- settings["trace_modules"] = True
- elif x != "":
- settings["trace_variables"].append(x)
- if getattr(rblf_cli, "RBC_MK2RBC_CONTINUE", ""):
- settings["mk2rbc_continue"] = True
- return struct(**settings)
-
# Settings used during debugging.
-_options = __get_options()
+_options = struct(
+ trace_modules = False,
+ trace_variables = [],
+)
+
rblf = struct(
soong_config_namespace = _soong_config_namespace,
soong_config_append = _soong_config_append,
diff --git a/core/release_config.mk b/core/release_config.mk
new file mode 100644
index 0000000..fdfc6a0
--- /dev/null
+++ b/core/release_config.mk
@@ -0,0 +1,222 @@
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Partitions that get build system flag summaries
+_FLAG_PARTITIONS := system vendor system_ext product
+
+# All possible release flags. Defined in the build_flags.mk files
+# throughout the tree
+_ALL_RELEASE_FLAGS :=
+
+# -----------------------------------------------------------------
+# Choose the flag files
+# Do this first, because we're going to unset TARGET_RELEASE before
+# including any of them, so they don't start writing conditionals based on it.
+
+# If this is a Google source tree, restrict it to only the one file
+# which has OWNERS control. If it isn't, let others define their own.
+# TODO: Remove the wildcard on build/release once all branch manifests
+# have been updated.
+config_map_files := $(wildcard build/release/release_config_map.mk) \
+ $(if $(wildcard vendor/google/release/release_config_map.mk), \
+ vendor/google/release/release_config_map.mk, \
+ $(sort \
+ $(wildcard device/*/release/release_config_map.mk) \
+ $(wildcard device/*/*/release/release_config_map.mk) \
+ $(wildcard vendor/*/release/release_config_map.mk) \
+ $(wildcard vendor/*/*/release/release_config_map.mk) \
+ ) \
+ )
+
+# $1 config name
+# $2 release config files
+define declare-release-config
+ $(eval # No duplicates)
+ $(if $(filter $(_all_release_configs), $(strip $(1))), \
+ $(error declare-release-config: config $(strip $(1)) declared in: $(_included). Previously declared here: $(_all_release_configs.$(strip $(1)).DECLARED_IN)) \
+ )
+ $(eval # Must have release config files)
+ $(if $(strip $(2)),, \
+ $(error declare-release-config: config $(strip $(1)) must have release config files) \
+ )
+ $(eval _all_release_configs := $(sort $(_all_release_configs) $(strip $(1))))
+ $(eval _all_release_configs.$(strip $(1)).DECLARED_IN := $(_included))
+ $(eval _all_release_configs.$(strip $(1)).FILES := $(strip $(2)))
+endef
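+
+# For illustration only (the config name and path are hypothetical), a
+# release_config_map.mk file would declare a release config with:
+#   $(call declare-release-config, trunk, vendor/acme/release/trunk.mk)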
+
+# Include the config map files
+$(foreach f, $(config_map_files), \
+ $(eval _included := $(f)) \
+ $(eval include $(f)) \
+)
+
+# If TARGET_RELEASE is set, fail if there is no matching release config
+# If it isn't set, no release config files will be included and all flags
+# will get their default values.
+ifneq ($(TARGET_RELEASE),)
+ifeq ($(filter $(_all_release_configs), $(TARGET_RELEASE)),)
+ $(error No release config found for TARGET_RELEASE: $(TARGET_RELEASE). Available releases are: $(_all_release_configs))
+else
+ # Choose flag files
+ # Don't sort this, use it in the order they gave us.
+ _release_config_files := $(_all_release_configs.$(TARGET_RELEASE).FILES)
+endif
+else
+# Useful for finding scripts etc. that aren't passing or setting TARGET_RELEASE.
+ifneq ($(FAIL_IF_NO_RELEASE_CONFIG),)
+ $(error FAIL_IF_NO_RELEASE_CONFIG was set and TARGET_RELEASE was not)
+endif
+_release_config_files :=
+endif
+
+# Redefine the macro so that later makefiles can't call it
+define declare-release-config
+$(error declare-release-config can only be called from inside release_config_map.mk files)
+endef
+
+# TODO: Remove this check once enough people have re-run lunch that we don't
+# need to worry about it calling get_build_vars TARGET_RELEASE. Maybe after ~9/2023.
+ifneq ($(CALLED_FROM_SETUP),true)
+define TARGET_RELEASE
+$(error TARGET_RELEASE may not be accessed directly. Use individual flags.)
+endef
+else
+TARGET_RELEASE:=
+endif
+.KATI_READONLY := TARGET_RELEASE
+
+$(foreach config, $(_all_release_configs), \
+ $(eval _all_release_configs.$(config).DECLARED_IN:= ) \
+ $(eval _all_release_configs.$(config).FILES:= ) \
+)
+_all_release_configs:=
+config_map_files:=
+
+# -----------------------------------------------------------------
+# Declare the flags
+
+# $1 partition(s)
+# $2 flag name. Must start with RELEASE_
+# $3 default. True or false
+define declare-build-flag
+ $(if $(filter-out all $(_FLAG_PARTITIONS), $(strip $(1))), \
+ $(error declare-build-flag: invalid partitions: $(strip $(1))) \
+ )
+ $(if $(and $(filter all,$(strip $(1))),$(filter-out all, $(strip $(1)))), \
+ $(error declare-build-flag: "all" can't be combined with other partitions: $(strip $(1))), \
+ $(eval declare-build-flag.partition := $(if $(filter all,$(strip $(1))),$(_FLAG_PARTITIONS),$(strip $(1)))) \
+ )
+ $(if $(filter-out RELEASE_%, $(strip $(2))), \
+ $(error declare-build-flag: Release flag names must start with RELEASE_: $(strip $(2))) \
+ )
+ $(eval _ALL_RELEASE_FLAGS += $(strip $(2)))
+ $(foreach partition, $(declare-build-flag.partition), \
+ $(eval _ALL_RELEASE_FLAGS.PARTITIONS.$(partition) := $(sort \
+ $(_ALL_RELEASE_FLAGS.PARTITIONS.$(partition)) $(strip $(2)))) \
+ )
+ $(eval _ALL_RELEASE_FLAGS.$(strip $(2)).PARTITIONS := $(declare-build-flag.partition))
+ $(eval _ALL_RELEASE_FLAGS.$(strip $(2)).DEFAULT := $(strip $(3)))
+ $(eval _ALL_RELEASE_FLAGS.$(strip $(2)).DECLARED_IN := $(_included))
+ $(eval _ALL_RELEASE_FLAGS.$(strip $(2)).VALUE := $(strip $(3)))
+ $(eval _ALL_RELEASE_FLAGS.$(strip $(2)).SET_IN := $(_included))
+ $(eval declare-build-flag.partition:=)
+endef
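+
+# For illustration only (the flag name is hypothetical), a build_flags.mk
+# file would declare a flag for all partitions, defaulting to false, with:
+#   $(call declare-build-flag, all, RELEASE_MY_FEATURE, false)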
+
+
+# Choose the files
+# If this is a Google source tree, restrict it to only the one file
+# which has OWNERS control. If it isn't, let others define their own.
+flag_declaration_files := $(wildcard build/release/build_flags.mk) \
+ $(if $(wildcard vendor/google/release/build_flags.mk), \
+ vendor/google/release/build_flags.mk, \
+ $(sort \
+ $(wildcard device/*/release/build_flags.mk) \
+ $(wildcard device/*/*/release/build_flags.mk) \
+ $(wildcard vendor/*/release/build_flags.mk) \
+ $(wildcard vendor/*/*/release/build_flags.mk) \
+ ) \
+ )
+
+# Include the files
+$(foreach f, $(flag_declaration_files), \
+ $(eval _included := $(f)) \
+ $(eval include $(f)) \
+)
+
+# Don't let anyone declare build flags after here
+define declare-build-flag
+$(error declare-build-flag can only be called from inside flag definition files.)
+endef
+
+# No more flags from here on
+.KATI_READONLY := _ALL_RELEASE_FLAGS
+
+# -----------------------------------------------------------------
+# Set the flags
+
+# $(1): Flag name. Must start with RELEASE_ and have been defined by declare-build-flag
+# $(2): Value. True or false
+define set-build-flag
+ $(if $(filter-out $(_ALL_RELEASE_FLAGS), $(strip $(1))), \
+ $(error set-build-flag: Undeclared build flag: $(strip $(1))) \
+ )
+ $(eval _ALL_RELEASE_FLAGS.$(strip $(1)).VALUE := $(strip $(2)))
+ $(eval _ALL_RELEASE_FLAGS.$(strip $(1)).SET_IN := $(_included))
+endef
+
+# Record the release version. It is kept in a private variable so that
+# makefiles are steered toward individual flags rather than conditionals
+# on the version.
+define set-release-version
+ $(eval _RELEASE_VERSION := $(strip $(1)))
+endef
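+
+# For illustration only (the values are hypothetical), a release config file
+# included below would typically contain:
+#   $(call set-release-version, 34)
+#   $(call set-build-flag, RELEASE_MY_FEATURE, true)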
+
+# Include the files (if there are any)
+ifneq ($(strip $(_release_config_files)),)
+ $(foreach f, $(_release_config_files), \
+ $(eval _included := $(f)) \
+ $(eval include $(f)) \
+ )
+else
+ # No TARGET_RELEASE means release version 0
+ $(call set-release-version, 0)
+endif
+
+
+ifeq ($(_RELEASE_VERSION),)
+ $(error No release config file invoked set-release-version. Included files were: $(_release_config_files))
+endif
+
+# Don't let anyone set build flags after here
+define set-build-flag
+$(error set-build-flag can only be called from inside release config files.)
+endef
+
+# Don't let anyone set the release version after here
+define set-release-version
+$(error set-release-version can only be called from inside release config files.)
+endef
+
+# Set the flag values, and don't allow anyone to modify them.
+$(foreach flag, $(_ALL_RELEASE_FLAGS), \
+ $(eval $(flag) := $(_ALL_RELEASE_FLAGS.$(flag).VALUE)) \
+ $(eval .KATI_READONLY := $(flag)) \
+)
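+
+# For illustration only (the flag name is hypothetical), a declared flag can
+# now be read like any other Make variable:
+#   ifeq ($(RELEASE_MY_FEATURE),true)
+#     PRODUCT_PACKAGES += my_feature_module
+#   endif
+# Reassigning it fails, because each flag is marked .KATI_READONLY above.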
+
+
+# -----------------------------------------------------------------
+# Clear out vars
+flag_declaration_files:=
+flag_files:=
+_included:=
+_release_config_files:=
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index dd550b5..ccc5449 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -239,26 +239,28 @@
include $(BUILD_SYSTEM)/link_type.mk
endif # !LOCAL_IS_HOST_MODULE
-ifdef LOCAL_SOONG_DEVICE_RRO_DIRS
- $(call append_enforce_rro_sources, \
- $(my_register_name), \
- false, \
- $(LOCAL_FULL_MANIFEST_FILE), \
- $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
- $(LOCAL_SOONG_DEVICE_RRO_DIRS), \
- vendor \
- )
-endif
+ifeq (,$(filter tests,$(LOCAL_MODULE_TAGS)))
+ ifdef LOCAL_SOONG_DEVICE_RRO_DIRS
+ $(call append_enforce_rro_sources, \
+ $(my_register_name), \
+ false, \
+ $(LOCAL_FULL_MANIFEST_FILE), \
+ $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
+ $(LOCAL_SOONG_DEVICE_RRO_DIRS), \
+ vendor \
+ )
+ endif
-ifdef LOCAL_SOONG_PRODUCT_RRO_DIRS
- $(call append_enforce_rro_sources, \
- $(my_register_name), \
- false, \
- $(LOCAL_FULL_MANIFEST_FILE), \
- $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
- $(LOCAL_SOONG_PRODUCT_RRO_DIRS), \
- product \
- )
+ ifdef LOCAL_SOONG_PRODUCT_RRO_DIRS
+ $(call append_enforce_rro_sources, \
+ $(my_register_name), \
+ false, \
+ $(LOCAL_FULL_MANIFEST_FILE), \
+ $(if $(LOCAL_EXPORT_PACKAGE_RESOURCES),true,false), \
+ $(LOCAL_SOONG_PRODUCT_RRO_DIRS), \
+ product \
+ )
+ endif
endif
ifdef LOCAL_PREBUILT_COVERAGE_ARCHIVE
@@ -273,4 +275,4 @@
###########################################################
## SBOM generation
###########################################################
-include $(BUILD_SBOM_GEN)
\ No newline at end of file
+include $(BUILD_SBOM_GEN)
diff --git a/core/soong_config.mk b/core/soong_config.mk
index 6348cf0..8b9ba4f 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -2,16 +2,19 @@
SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.variables
SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android-$(TARGET_PRODUCT).mk
-BINDER32BIT :=
-ifneq ($(TARGET_USES_64_BIT_BINDER),true)
-ifneq ($(TARGET_IS_64_BIT),true)
-BINDER32BIT := true
-endif
-endif
-
include $(BUILD_SYSTEM)/art_config.mk
include $(BUILD_SYSTEM)/dex_preopt_config.mk
+ifndef AFDO_PROFILES
+# Set AFDO_PROFILES
+-include vendor/google_data/pgo_profile/sampling/afdo_profiles.mk
+else
+$(error AFDO_PROFILES can only be set from soong_config.mk. For product-specific fdo_profiles, please use PRODUCT_AFDO_PROFILES)
+endif
+
+# PRODUCT_AFDO_PROFILES takes precedence over product-agnostic profiles in AFDO_PROFILES
+ALL_AFDO_PROFILES := $(PRODUCT_AFDO_PROFILES) $(AFDO_PROFILES)
+
ifeq ($(WRITE_SOONG_VARIABLES),true)
# Create soong.variables with copies of makefile settings. Runs every build,
@@ -110,6 +113,7 @@
$(call add_json_list, CFIExcludePaths, $(CFI_EXCLUDE_PATHS) $(PRODUCT_CFI_EXCLUDE_PATHS))
$(call add_json_list, CFIIncludePaths, $(CFI_INCLUDE_PATHS) $(PRODUCT_CFI_INCLUDE_PATHS))
$(call add_json_list, IntegerOverflowExcludePaths, $(INTEGER_OVERFLOW_EXCLUDE_PATHS) $(PRODUCT_INTEGER_OVERFLOW_EXCLUDE_PATHS))
+$(call add_json_list, HWASanIncludePaths, $(HWASAN_INCLUDE_PATHS) $(PRODUCT_HWASAN_INCLUDE_PATHS))
$(call add_json_list, MemtagHeapExcludePaths, $(MEMTAG_HEAP_EXCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_EXCLUDE_PATHS))
$(call add_json_list, MemtagHeapAsyncIncludePaths, $(MEMTAG_HEAP_ASYNC_INCLUDE_PATHS) $(PRODUCT_MEMTAG_HEAP_ASYNC_INCLUDE_PATHS))
@@ -132,7 +136,6 @@
$(call add_json_bool, SamplingPGO, $(filter true,$(SAMPLING_PGO)))
$(call add_json_bool, ArtUseReadBarrier, $(call invert_bool,$(filter false,$(PRODUCT_ART_USE_READ_BARRIER))))
-$(call add_json_bool, Binder32bit, $(BINDER32BIT))
$(call add_json_str, BtConfigIncludeDir, $(BOARD_BLUETOOTH_BDROID_BUILDCFG_INCLUDE_DIR))
$(call add_json_list, DeviceKernelHeaders, $(TARGET_DEVICE_KERNEL_HEADERS) $(TARGET_BOARD_KERNEL_HEADERS) $(TARGET_PRODUCT_KERNEL_HEADERS))
$(call add_json_str, DeviceVndkVersion, $(BOARD_VNDK_VERSION))
@@ -146,6 +149,7 @@
$(call add_json_bool, Malloc_zero_contents, $(call invert_bool,$(filter false,$(MALLOC_ZERO_CONTENTS))))
$(call add_json_bool, Malloc_pattern_fill_contents, $(MALLOC_PATTERN_FILL_CONTENTS))
$(call add_json_str, Override_rs_driver, $(OVERRIDE_RS_DRIVER))
+$(call add_json_str, DeviceMaxPageSizeSupported, $(TARGET_MAX_PAGE_SIZE_SUPPORTED))
$(call add_json_bool, UncompressPrivAppDex, $(call invert_bool,$(filter true,$(DONT_UNCOMPRESS_PRIV_APPS_DEXS))))
$(call add_json_list, ModulesLoadedByPrivilegedModules, $(PRODUCT_LOADED_BY_PRIVILEGED_MODULES))
@@ -309,7 +313,14 @@
$(call add_json_list, IncludeTags, $(PRODUCT_INCLUDE_TAGS))
$(call add_json_list, SourceRootDirs, $(PRODUCT_SOURCE_ROOT_DIRS))
-$(call add_json_list, AfdoProfiles, $(PRODUCT_AFDO_PROFILES))
+$(call add_json_list, AfdoProfiles, $(ALL_AFDO_PROFILES))
+
+$(call add_json_str, ProductManufacturer, $(PRODUCT_MANUFACTURER))
+$(call add_json_str, ProductBrand, $(PRODUCT_BRAND))
+$(call add_json_list, BuildVersionTags, $(BUILD_VERSION_TAGS))
+
+$(call add_json_str, ReleaseVersion, $(_RELEASE_VERSION))
+$(call add_json_list, ReleaseDeviceConfigValueSets, $(RELEASE_DEVICE_CONFIG_VALUE_SETS))
$(call json_end)
diff --git a/core/tasks/collect_gpl_sources.mk b/core/tasks/collect_gpl_sources.mk
deleted file mode 100644
index 9e9ab8e..0000000
--- a/core/tasks/collect_gpl_sources.mk
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright (C) 2011 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# The rule below doesn't have dependencies on the files that it copies,
-# so manually generate into a PACKAGING intermediate dir, which is wiped
-# in installclean between incremental builds on build servers.
-gpl_source_tgz := $(call intermediates-dir-for,PACKAGING,gpl_source)/gpl_source.tgz
-
-ALL_GPL_MODULE_LICENSE_FILES := $(sort $(ALL_GPL_MODULE_LICENSE_FILES))
-
-# FORCE since we can't know whether any of the sources changed
-$(gpl_source_tgz): PRIVATE_PATHS := $(sort $(patsubst %/, %, $(dir $(ALL_GPL_MODULE_LICENSE_FILES))))
-$(gpl_source_tgz) : $(ALL_GPL_MODULE_LICENSE_FILES)
- @echo Package GPL sources: $@
- $(hide) tar cfz $@ --exclude ".git*" $(PRIVATE_PATHS)
-
-# Dist the tgz only if we are doing a full build
-$(call dist-for-goals,droidcore-unbundled,$(gpl_source_tgz))
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
index 9400890..8ae2a9a 100644
--- a/core/tasks/tools/compatibility.mk
+++ b/core/tasks/tools/compatibility.mk
@@ -30,8 +30,6 @@
out_dir := $(HOST_OUT)/$(test_suite_name)/$(test_suite_subdir)
test_artifacts := $(COMPATIBILITY.$(test_suite_name).FILES)
test_tools := $(HOST_OUT_JAVA_LIBRARIES)/tradefed.jar \
- $(HOST_OUT_JAVA_LIBRARIES)/tradefed-no-fwk.jar \
- $(HOST_OUT_JAVA_LIBRARIES)/tradefed-test-framework.jar \
$(HOST_OUT_JAVA_LIBRARIES)/loganalysis.jar \
$(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util.jar \
$(HOST_OUT_JAVA_LIBRARIES)/compatibility-tradefed.jar \
@@ -46,10 +44,16 @@
# The JDK to package into the test suite zip file. Always package the linux JDK.
test_suite_jdk_dir := $(ANDROID_JAVA_HOME)/../linux-x86
+ifndef test_suite_jdk_files
+ # This file gets included many times, so make sure we only run the $(shell) once.
+ # Otherwise it will slow down every build due to all copies of it being rerun when kati
+ # checks the stamp file.
+ test_suite_jdk_files :=$= $(shell find $(test_suite_jdk_dir) -type f | sort)
+endif
test_suite_jdk := $(call intermediates-dir-for,PACKAGING,$(test_suite_name)_jdk,HOST)/jdk.zip
$(test_suite_jdk): PRIVATE_JDK_DIR := $(test_suite_jdk_dir)
$(test_suite_jdk): PRIVATE_SUBDIR := $(test_suite_subdir)
-$(test_suite_jdk): $(shell find $(test_suite_jdk_dir) -type f | sort)
+$(test_suite_jdk): $(test_suite_jdk_files)
$(test_suite_jdk): $(SOONG_ZIP)
$(SOONG_ZIP) -o $@ -P $(PRIVATE_SUBDIR)/jdk -C $(PRIVATE_JDK_DIR) -D $(PRIVATE_JDK_DIR) -sha256
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 95b3d37..f9175e45 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -40,7 +40,7 @@
include $(INTERNAL_BUILD_ID_MAKEFILE)
endif
-DEFAULT_PLATFORM_VERSION := UP1A
+DEFAULT_PLATFORM_VERSION := VP1A
.KATI_READONLY := DEFAULT_PLATFORM_VERSION
MIN_PLATFORM_VERSION := UP1A
MAX_PLATFORM_VERSION := VP1A
@@ -91,7 +91,7 @@
Base Base11 Cupcake Donut Eclair Eclair01 EclairMr1 Froyo Gingerbread GingerbreadMr1 \
Honeycomb HoneycombMr1 HoneycombMr2 IceCreamSandwich IceCreamSandwichMr1 \
JellyBean JellyBeanMr1 JellyBeanMr2 Kitkat KitkatWatch Lollipop LollipopMr1 M N NMr1 O OMr1 P \
-Q R S Sv2 Tiramisu UpsideDownCake
+Q R S Sv2 Tiramisu UpsideDownCake VanillaIceCream
# Convert from space separated list to comma separated
PLATFORM_VERSION_KNOWN_CODENAMES := \
@@ -104,7 +104,7 @@
# It must be of the form "YYYY-MM-DD" on production devices.
# It must match one of the Android Security Patch Level strings of the Public Security Bulletins.
# If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
- PLATFORM_SECURITY_PATCH := 2023-03-05
+ PLATFORM_SECURITY_PATCH := 2023-05-05
endif
include $(BUILD_SYSTEM)/version_util.mk
diff --git a/envsetup.sh b/envsetup.sh
index 905635c..d292dbb 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -312,7 +312,7 @@
# would prevent exporting type info from those packages.
#
# http://b/266688086
- export ANDROID_PYTHONPATH=$T/development/python-packages/adb:$T/development/python-packages:
+ export ANDROID_PYTHONPATH=$T/development/python-packages/adb:$T/development/python-packages/gdbrunner:$T/development/python-packages:
if [ -n "$VENDOR_PYTHONPATH" ]; then
ANDROID_PYTHONPATH=$ANDROID_PYTHONPATH$VENDOR_PYTHONPATH
fi
@@ -804,13 +804,19 @@
export TARGET_BUILD_APPS=
- local product variant_and_version variant version
+ # Support either <product>-<variant> or <product>-<release>-<variant>
+ local product release_and_variant release variant
product=${selection%%-*} # Trim everything after first dash
- variant_and_version=${selection#*-} # Trim everything up to first dash
- if [ "$variant_and_version" != "$selection" ]; then
- variant=${variant_and_version%%-*}
- if [ "$variant" != "$variant_and_version" ]; then
- version=${variant_and_version#*-}
+ release_and_variant=${selection#*-} # Trim everything up to first dash
+ if [ "$release_and_variant" != "$selection" ]; then
+ local first=${release_and_variant%%-*} # Trim everything after first dash
+ if [ "$first" != "$release_and_variant" ]; then
+ # There is a 2nd dash, split into release-variant
+ release=$first # Everything up to the dash
+ variant=${release_and_variant#*-} # Trim everything up to dash
+ else
+ # There is no 2nd dash; treat the second field as the variant
+ variant=$first
fi
fi
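
# For example, a hypothetical "lunch aosp_arm64-trunk_staging-userdebug" now
# selects product aosp_arm64, release trunk_staging and variant userdebug,
# while the two-field form "lunch aosp_arm64-userdebug" leaves the release unset.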
@@ -823,7 +829,7 @@
TARGET_PRODUCT=$product \
TARGET_BUILD_VARIANT=$variant \
- TARGET_PLATFORM_VERSION=$version \
+ TARGET_RELEASE=$release \
build_build_var_cache
if [ $? -ne 0 ]
then
@@ -835,10 +841,10 @@
fi
export TARGET_PRODUCT=$(get_build_var TARGET_PRODUCT)
export TARGET_BUILD_VARIANT=$(get_build_var TARGET_BUILD_VARIANT)
- if [ -n "$version" ]; then
- export TARGET_PLATFORM_VERSION=$(get_build_var TARGET_PLATFORM_VERSION)
+ if [ -n "$release" ]; then
+ export TARGET_RELEASE=$release
else
- unset TARGET_PLATFORM_VERSION
+ unset TARGET_RELEASE
fi
export TARGET_BUILD_TYPE=release
@@ -1096,12 +1102,12 @@
#
# Easy way to make system.img/etc writable
function syswrite() {
- adb wait-for-device && adb root || return 1
+ adb wait-for-device && adb root && adb wait-for-device || return 1
if [[ $(adb disable-verity | grep -i "reboot") ]]; then
echo "rebooting"
- adb reboot && adb wait-for-device && adb root || return 1
+ adb reboot && adb wait-for-device && adb root && adb wait-for-device || return 1
fi
- adb wait-for-device && adb remount || return 1
+ adb remount || return 1
}
# coredump_setup - enable core dumps globally for any process
diff --git a/target/board/BoardConfigGsiCommon.mk b/target/board/BoardConfigGsiCommon.mk
index 4d95b33..67e31df 100644
--- a/target/board/BoardConfigGsiCommon.mk
+++ b/target/board/BoardConfigGsiCommon.mk
@@ -36,6 +36,7 @@
TARGET_COPY_OUT_PRODUCT := system/product
TARGET_COPY_OUT_SYSTEM_EXT := system/system_ext
BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE :=
+BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE :=
# Creates metadata partition mount point under root for
# the devices with metadata partition
diff --git a/target/board/BoardConfigMainlineCommon.mk b/target/board/BoardConfigMainlineCommon.mk
index 00f6e5b..01ebe56 100644
--- a/target/board/BoardConfigMainlineCommon.mk
+++ b/target/board/BoardConfigMainlineCommon.mk
@@ -14,6 +14,8 @@
TARGET_COPY_OUT_SYSTEM_EXT := system_ext
TARGET_COPY_OUT_VENDOR := vendor
TARGET_COPY_OUT_PRODUCT := product
+BOARD_PRODUCTIMAGE_FILE_SYSTEM_TYPE := ext4
+BOARD_SYSTEM_EXTIMAGE_FILE_SYSTEM_TYPE := ext4
# Creates metadata partition mount point under root for
# the devices with metadata partition
@@ -22,9 +24,6 @@
# Default is current, but allow devices to override vndk version if needed.
BOARD_VNDK_VERSION ?= current
-# Required flag for non-64 bit devices from P.
-TARGET_USES_64_BIT_BINDER := true
-
# 64 bit mediadrmserver
TARGET_ENABLE_MEDIADRM_64 := true
diff --git a/target/board/BoardConfigModuleCommon.mk b/target/board/BoardConfigModuleCommon.mk
deleted file mode 100644
index 24c01a5..0000000
--- a/target/board/BoardConfigModuleCommon.mk
+++ /dev/null
@@ -1,6 +0,0 @@
-# BoardConfigModuleCommon.mk
-#
-# Common compile-time settings for module builds.
-
-# Required for all module devices.
-TARGET_USES_64_BIT_BINDER := true
diff --git a/target/board/emulator_arm64/device.mk b/target/board/emulator_arm64/device.mk
index dc84192..d221e64 100644
--- a/target/board/emulator_arm64/device.mk
+++ b/target/board/emulator_arm64/device.mk
@@ -17,12 +17,3 @@
PRODUCT_SOONG_NAMESPACES += device/generic/goldfish # for libwifi-hal-emu
PRODUCT_SOONG_NAMESPACES += device/generic/goldfish-opengl # for goldfish deps.
-# Cuttlefish has GKI kernel prebuilts, so use those for the GKI boot.img.
-ifeq ($(TARGET_PREBUILT_KERNEL),)
- LOCAL_KERNEL := kernel/prebuilts/5.4/arm64/kernel-5.4-lz4
-else
- LOCAL_KERNEL := $(TARGET_PREBUILT_KERNEL)
-endif
-
-PRODUCT_COPY_FILES += \
- $(LOCAL_KERNEL):kernel
diff --git a/target/board/mainline_sdk/BoardConfig.mk b/target/board/mainline_sdk/BoardConfig.mk
index f5c2dc6..84f8b2d 100644
--- a/target/board/mainline_sdk/BoardConfig.mk
+++ b/target/board/mainline_sdk/BoardConfig.mk
@@ -18,6 +18,3 @@
HOST_CROSS_OS := linux_bionic
HOST_CROSS_ARCH := x86_64
HOST_CROSS_2ND_ARCH :=
-
-# Required flag for non-64 bit devices from P.
-TARGET_USES_64_BIT_BINDER := true
diff --git a/target/board/module_arm/BoardConfig.mk b/target/board/module_arm/BoardConfig.mk
index 3f35c06..565efc8 100644
--- a/target/board/module_arm/BoardConfig.mk
+++ b/target/board/module_arm/BoardConfig.mk
@@ -13,8 +13,6 @@
# limitations under the License.
#
-include build/make/target/board/BoardConfigModuleCommon.mk
-
TARGET_ARCH := arm
TARGET_ARCH_VARIANT := armv7-a-neon
TARGET_CPU_VARIANT := generic
diff --git a/target/board/module_arm64/BoardConfig.mk b/target/board/module_arm64/BoardConfig.mk
index 3700056..66e3792 100644
--- a/target/board/module_arm64/BoardConfig.mk
+++ b/target/board/module_arm64/BoardConfig.mk
@@ -13,8 +13,6 @@
# limitations under the License.
#
-include build/make/target/board/BoardConfigModuleCommon.mk
-
TARGET_ARCH := arm64
TARGET_ARCH_VARIANT := armv8-a
TARGET_CPU_VARIANT := generic
diff --git a/target/board/module_arm64only/BoardConfig.mk b/target/board/module_arm64only/BoardConfig.mk
index 3cabf05..6c26579 100644
--- a/target/board/module_arm64only/BoardConfig.mk
+++ b/target/board/module_arm64only/BoardConfig.mk
@@ -13,8 +13,6 @@
# limitations under the License.
#
-include build/make/target/board/BoardConfigModuleCommon.mk
-
TARGET_ARCH := arm64
TARGET_ARCH_VARIANT := armv8-a
TARGET_CPU_VARIANT := generic
diff --git a/target/board/module_x86/BoardConfig.mk b/target/board/module_x86/BoardConfig.mk
index a93ac97..af3fffd 100644
--- a/target/board/module_x86/BoardConfig.mk
+++ b/target/board/module_x86/BoardConfig.mk
@@ -13,8 +13,6 @@
# limitations under the License.
#
-include build/make/target/board/BoardConfigModuleCommon.mk
-
TARGET_CPU_ABI := x86
TARGET_ARCH := x86
TARGET_ARCH_VARIANT := x86
diff --git a/target/board/module_x86_64/BoardConfig.mk b/target/board/module_x86_64/BoardConfig.mk
index 1ed3be0..1ada027 100644
--- a/target/board/module_x86_64/BoardConfig.mk
+++ b/target/board/module_x86_64/BoardConfig.mk
@@ -13,8 +13,6 @@
# limitations under the License.
#
-include build/make/target/board/BoardConfigModuleCommon.mk
-
TARGET_CPU_ABI := x86_64
TARGET_ARCH := x86_64
TARGET_ARCH_VARIANT := x86_64
diff --git a/target/board/module_x86_64only/BoardConfig.mk b/target/board/module_x86_64only/BoardConfig.mk
index b0676cb..5b86f0a 100644
--- a/target/board/module_x86_64only/BoardConfig.mk
+++ b/target/board/module_x86_64only/BoardConfig.mk
@@ -13,8 +13,6 @@
# limitations under the License.
#
-include build/make/target/board/BoardConfigModuleCommon.mk
-
TARGET_CPU_ABI := x86_64
TARGET_ARCH := x86_64
TARGET_ARCH_VARIANT := x86_64
diff --git a/target/board/ndk/BoardConfig.mk b/target/board/ndk/BoardConfig.mk
index da8b5f3..b485f8b 100644
--- a/target/board/ndk/BoardConfig.mk
+++ b/target/board/ndk/BoardConfig.mk
@@ -14,7 +14,6 @@
#
TARGET_ARCH_SUITE := ndk
-TARGET_USES_64_BIT_BINDER := true
MALLOC_SVELTE := true
diff --git a/target/product/angle_default.mk b/target/product/angle_default.mk
new file mode 100644
index 0000000..bea0be6
--- /dev/null
+++ b/target/product/angle_default.mk
@@ -0,0 +1,23 @@
+#
+# Copyright 2023 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# To enable ANGLE as the default system GLES drivers, add
+# $(call inherit-product, $(SRC_TARGET_DIR)/product/angle_default.mk) to the Makefile.
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/angle_supported.mk)
+
+PRODUCT_VENDOR_PROPERTIES += \
+ persist.graphics.egl=angle
diff --git a/target/product/angle_supported.mk b/target/product/angle_supported.mk
new file mode 100644
index 0000000..c83ff5f
--- /dev/null
+++ b/target/product/angle_supported.mk
@@ -0,0 +1,27 @@
+#
+# Copyright 2023 The Android Open-Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# To include ANGLE into the image build, add
+# $(call inherit-product, $(SRC_TARGET_DIR)/product/angle_supported.mk) to the Makefile.
+# By default, this will allow ANGLE binaries to coexist with native GLES drivers.
+
+PRODUCT_PACKAGES += \
+ libEGL_angle \
+ libGLESv1_CM_angle \
+ libGLESv2_angle
+
+# Set ro.gfx.angle.supported based on whether ANGLE is installed in the vendor partition
+PRODUCT_VENDOR_PROPERTIES += ro.gfx.angle.supported=true
diff --git a/target/product/aosp_64bitonly_x86_64.mk b/target/product/aosp_64bitonly_x86_64.mk
index 4de4e0c..75fd3c8 100644
--- a/target/product/aosp_64bitonly_x86_64.mk
+++ b/target/product/aosp_64bitonly_x86_64.mk
@@ -51,7 +51,6 @@
#
# All components inherited here go to vendor image
#
-$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
@@ -59,6 +58,9 @@
# Special settings for GSI releasing
#
ifeq (aosp_64bitonly_x86_64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
endif
diff --git a/target/product/aosp_arm.mk b/target/product/aosp_arm.mk
index 5f200aa..61c1316 100644
--- a/target/product/aosp_arm.mk
+++ b/target/product/aosp_arm.mk
@@ -57,6 +57,9 @@
# Special settings for GSI releasing
#
ifeq (aosp_arm,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
endif
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index ffc37a9..6c907db 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -62,6 +62,9 @@
# Special settings for GSI releasing
#
ifeq (aosp_arm64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
endif
diff --git a/target/product/aosp_riscv64.mk b/target/product/aosp_riscv64.mk
index bc35b95..270a989 100644
--- a/target/product/aosp_riscv64.mk
+++ b/target/product/aosp_riscv64.mk
@@ -46,7 +46,6 @@
#
# All components inherited here go to vendor image
#
-$(call inherit-product-if-exists, device/generic/goldfish/riscv64-vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_riscv64/device.mk)
@@ -54,6 +53,9 @@
# Special settings for GSI releasing
#
ifeq (aosp_riscv64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
endif
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 7db2c0f..a2f0390 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -47,7 +47,6 @@
#
# All components inherited here go to vendor image
#
-$(call inherit-product-if-exists, device/generic/goldfish/x86-vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86/device.mk)
@@ -56,6 +55,9 @@
# Special settings for GSI releasing
#
ifeq (aosp_x86,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
endif
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index d55866f..535ee3f 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -56,7 +56,6 @@
#
# All components inherited here go to vendor image
#
-$(call inherit-product-if-exists, device/generic/goldfish/x86_64-vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/non_ab_device.mk)
@@ -65,6 +64,9 @@
# Special settings for GSI releasing
#
ifeq (aosp_x86_64,$(TARGET_PRODUCT))
+# Build modules from source if this has not been pre-configured
+MODULE_BUILD_FROM_SOURCE ?= true
+
$(call inherit-product, $(SRC_TARGET_DIR)/product/gsi_release.mk)
endif
diff --git a/target/product/aosp_x86_arm.mk b/target/product/aosp_x86_arm.mk
index f96e068..39ad0d8 100644
--- a/target/product/aosp_x86_arm.mk
+++ b/target/product/aosp_x86_arm.mk
@@ -45,7 +45,6 @@
#
# All components inherited here go to vendor image
#
-$(call inherit-product-if-exists, device/generic/goldfish/x86-vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/emulator_vendor.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_arm/device.mk)
diff --git a/target/product/base_system.mk b/target/product/base_system.mk
index 94b5c16..d65e5a4 100644
--- a/target/product/base_system.mk
+++ b/target/product/base_system.mk
@@ -226,7 +226,7 @@
mtpd \
ndc \
netd \
- NetworkStackNext \
+ NetworkStack \
odsign \
org.apache.http.legacy \
otacerts \
@@ -296,11 +296,9 @@
system_manifest.xml \
system_compatibility_matrix.xml \
-# HWASAN runtime for SANITIZE_TARGET=hwaddress builds
-ifneq (,$(filter hwaddress,$(SANITIZE_TARGET)))
- PRODUCT_PACKAGES += \
- libclang_rt.hwasan.bootstrap
-endif
+PRODUCT_PACKAGES_ARM64 := libclang_rt.hwasan \
+ libclang_rt.hwasan.bootstrap \
+ libc_hwasan
# Jacoco agent JARS to be built and installed, if any.
ifeq ($(EMMA_INSTRUMENT),true)
diff --git a/target/product/gsi_release.mk b/target/product/gsi_release.mk
index 09d4bc9..3b97792 100644
--- a/target/product/gsi_release.mk
+++ b/target/product/gsi_release.mk
@@ -36,7 +36,7 @@
# GSI should always support up-to-date platform features.
# Keep this value at the latest API level to ensure latest build system
# default configs are applied.
-PRODUCT_SHIPPING_API_LEVEL := 31
+PRODUCT_SHIPPING_API_LEVEL := 34
# Enable dynamic partitions to facilitate mixing onto Cuttlefish
PRODUCT_USE_DYNAMIC_PARTITIONS := true
@@ -88,9 +88,6 @@
PRODUCT_BUILD_SYSTEM_DLKM_IMAGE := false
PRODUCT_EXPORT_BOOT_IMAGE_TO_DIST := true
-# Always build modules from source
-MODULE_BUILD_FROM_SOURCE := true
-
# Additional settings used in all GSI builds
PRODUCT_PRODUCT_PROPERTIES += \
ro.crypto.metadata_init_delete_all_keys.enabled=false \
diff --git a/target/product/virtual_ab_ota/android_t_baseline.mk b/target/product/virtual_ab_ota/android_t_baseline.mk
index 418aaa4..f862485 100644
--- a/target/product/virtual_ab_ota/android_t_baseline.mk
+++ b/target/product/virtual_ab_ota/android_t_baseline.mk
@@ -20,3 +20,5 @@
#
# All U+ launching devices should instead use vabc_features.mk.
$(call inherit-product, $(SRC_TARGET_DIR)/product/virtual_ab_ota/vabc_features.mk)
+
+PRODUCT_VIRTUAL_AB_COW_VERSION := 2
diff --git a/tests/product.rbc b/tests/product.rbc
index 9ae6393..b4c6d45 100644
--- a/tests/product.rbc
+++ b/tests/product.rbc
@@ -54,6 +54,7 @@
rblf.soong_config_append(g, "NS1", "v2", "def")
rblf.soong_config_set(g, "NS2", "v3", "abc")
rblf.soong_config_set(g, "NS2", "v3", "xyz")
+ rblf.soong_config_set(g, "NS2", "v4", "xyz ")
rblf.mkdist_for_goals(g, "goal", "dir1/file1:out1 dir1/file2:out2")
rblf.mkdist_for_goals(g, "goal", "dir2/file2:")
diff --git a/tests/run.rbc b/tests/run.rbc
index 33583eb..85d6c09 100644
--- a/tests/run.rbc
+++ b/tests/run.rbc
@@ -144,7 +144,8 @@
"v2": "def"
},
"NS2": {
- "v3": "xyz"
+ "v3": "xyz",
+ "v4": "xyz"
}
},
{k:v for k, v in sorted(ns.items()) }
diff --git a/tools/Android.bp b/tools/Android.bp
index e325f6b..bea0602 100644
--- a/tools/Android.bp
+++ b/tools/Android.bp
@@ -71,22 +71,6 @@
}
python_binary_host {
- name: "generate-sbom",
- srcs: [
- "generate-sbom.py",
- ],
- version: {
- py3: {
- embedded_launcher: true,
- },
- },
- libs: [
- "metadata_file_proto_py",
- "libprotobuf-python",
- ],
-}
-
-python_binary_host {
name: "list_files",
main: "list_files.py",
srcs: [
diff --git a/tools/aconfig/.gitignore b/tools/aconfig/.gitignore
new file mode 100644
index 0000000..1b72444
--- /dev/null
+++ b/tools/aconfig/.gitignore
@@ -0,0 +1,2 @@
+/Cargo.lock
+/target
diff --git a/tools/aconfig/Android.bp b/tools/aconfig/Android.bp
new file mode 100644
index 0000000..9617e0e
--- /dev/null
+++ b/tools/aconfig/Android.bp
@@ -0,0 +1,38 @@
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+rust_protobuf_host {
+ name: "libaconfig_protos",
+ protos: ["protos/aconfig.proto"],
+ crate_name: "aconfig_protos",
+ source_stem: "aconfig_protos",
+ use_protobuf3: true,
+}
+
+rust_defaults {
+ name: "aconfig.defaults",
+ edition: "2021",
+ clippy_lints: "android",
+ lints: "android",
+ srcs: ["src/main.rs"],
+ rustlibs: [
+ "libaconfig_protos",
+ "libanyhow",
+ "libclap",
+ "libprotobuf",
+ "libserde",
+ "libserde_json",
+ "libtinytemplate",
+ ],
+}
+
+rust_binary_host {
+ name: "aconfig",
+ defaults: ["aconfig.defaults"],
+}
+
+rust_test_host {
+ name: "aconfig.test",
+ defaults: ["aconfig.defaults"],
+}
diff --git a/tools/aconfig/Cargo.toml b/tools/aconfig/Cargo.toml
new file mode 100644
index 0000000..8517dd2
--- /dev/null
+++ b/tools/aconfig/Cargo.toml
@@ -0,0 +1,20 @@
+[package]
+name = "aconfig"
+version = "0.1.0"
+edition = "2021"
+build = "build.rs"
+
+[features]
+default = ["cargo"]
+cargo = []
+
+[dependencies]
+anyhow = "1.0.69"
+clap = { version = "4.1.8", features = ["derive"] }
+protobuf = "3.2.0"
+serde = { version = "1.0.152", features = ["derive"] }
+serde_json = "1.0.93"
+tinytemplate = "1.2.1"
+
+[build-dependencies]
+protobuf-codegen = "3.2.0"
diff --git a/tools/aconfig/MODULE_LICENSE_APACHE2 b/tools/aconfig/MODULE_LICENSE_APACHE2
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/tools/aconfig/MODULE_LICENSE_APACHE2
diff --git a/tools/aconfig/OWNERS b/tools/aconfig/OWNERS
new file mode 100644
index 0000000..4e05b00
--- /dev/null
+++ b/tools/aconfig/OWNERS
@@ -0,0 +1,5 @@
+amhk@google.com
+jham@google.com
+joeo@google.com
+opg@google.com
+zhidou@google.com
diff --git a/tools/aconfig/PREUPLOAD.cfg b/tools/aconfig/PREUPLOAD.cfg
new file mode 100644
index 0000000..75ed57c
--- /dev/null
+++ b/tools/aconfig/PREUPLOAD.cfg
@@ -0,0 +1,5 @@
+[Builtin Hooks]
+rustfmt = true
+
+[Builtin Hooks Options]
+rustfmt = --config-path=rustfmt.toml
diff --git a/tools/aconfig/build.rs b/tools/aconfig/build.rs
new file mode 100644
index 0000000..5ef5b60
--- /dev/null
+++ b/tools/aconfig/build.rs
@@ -0,0 +1,17 @@
+use protobuf_codegen::Codegen;
+
+fn main() {
+ let proto_files = vec!["protos/aconfig.proto"];
+
+ // tell cargo to only re-run the build script if any of the proto files has changed
+ for path in &proto_files {
+ println!("cargo:rerun-if-changed={}", path);
+ }
+
+ Codegen::new()
+ .pure()
+ .include("protos")
+ .inputs(proto_files)
+ .cargo_out_dir("aconfig_proto")
+ .run_from_script();
+}
diff --git a/tools/aconfig/protos/aconfig.proto b/tools/aconfig/protos/aconfig.proto
new file mode 100644
index 0000000..9d36a9e
--- /dev/null
+++ b/tools/aconfig/protos/aconfig.proto
@@ -0,0 +1,78 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// This is the schema definition for aconfig files. Modifications need to be
+// either backwards compatible, or include updates to all aconfig files in the
+// Android tree.
+
+syntax = "proto2";
+
+package android.aconfig;
+
+// messages used in both aconfig input and output
+
+enum flag_state {
+ ENABLED = 1;
+ DISABLED = 2;
+}
+
+enum flag_permission {
+ READ_ONLY = 1;
+ READ_WRITE = 2;
+}
+
+// aconfig input messages: flag declarations and values
+
+message flag_declaration {
+ required string name = 1;
+ required string description = 2;
+};
+
+message flag_declarations {
+ required string namespace = 1;
+ repeated flag_declaration flag = 2;
+};
+
+message flag_value {
+ required string namespace = 1;
+ required string name = 2;
+ required flag_state state = 3;
+ required flag_permission permission = 4;
+};
+
+message flag_values {
+ repeated flag_value flag_value = 1;
+};
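+
+// For illustration only (all names are hypothetical), a flag_declarations
+// input file in text proto form would look like:
+//
+//   namespace: "my_namespace"
+//   flag {
+//     name: "my_flag"
+//     description: "A hypothetical flag"
+//   }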
+
+// aconfig output messages: parsed and verified flag declarations and values
+
+message tracepoint {
+ // path to declaration or value file relative to $TOP
+ required string source = 1;
+ required flag_state state = 2;
+ required flag_permission permission = 3;
+}
+
+message parsed_flag {
+ required string namespace = 1;
+ required string name = 2;
+ required string description = 3;
+ required flag_state state = 4;
+ required flag_permission permission = 5;
+ repeated tracepoint trace = 6;
+}
+
+message parsed_flags {
+ repeated parsed_flag parsed_flag = 1;
+}
diff --git a/tools/aconfig/rustfmt.toml b/tools/aconfig/rustfmt.toml
new file mode 120000
index 0000000..291e99b
--- /dev/null
+++ b/tools/aconfig/rustfmt.toml
@@ -0,0 +1 @@
+../../../soong/scripts/rustfmt.toml
\ No newline at end of file
diff --git a/tools/aconfig/src/aconfig.rs b/tools/aconfig/src/aconfig.rs
new file mode 100644
index 0000000..b9fa324
--- /dev/null
+++ b/tools/aconfig/src/aconfig.rs
@@ -0,0 +1,289 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::{anyhow, bail, Context, Error, Result};
+use protobuf::{Enum, EnumOrUnknown};
+use serde::{Deserialize, Serialize};
+
+use crate::cache::{Cache, Item, Tracepoint};
+use crate::protos::{
+ ProtoFlagDeclaration, ProtoFlagDeclarations, ProtoFlagPermission, ProtoFlagState,
+ ProtoFlagValue, ProtoFlagValues, ProtoParsedFlag, ProtoParsedFlags, ProtoTracepoint,
+};
+
+#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Copy)]
+pub enum FlagState {
+ Enabled,
+ Disabled,
+}
+
+impl TryFrom<EnumOrUnknown<ProtoFlagState>> for FlagState {
+ type Error = Error;
+
+ fn try_from(proto: EnumOrUnknown<ProtoFlagState>) -> Result<Self, Self::Error> {
+ match ProtoFlagState::from_i32(proto.value()) {
+ Some(ProtoFlagState::ENABLED) => Ok(FlagState::Enabled),
+ Some(ProtoFlagState::DISABLED) => Ok(FlagState::Disabled),
+ None => Err(anyhow!("unknown flag state enum value {}", proto.value())),
+ }
+ }
+}
+
+impl From<FlagState> for ProtoFlagState {
+ fn from(state: FlagState) -> Self {
+ match state {
+ FlagState::Enabled => ProtoFlagState::ENABLED,
+ FlagState::Disabled => ProtoFlagState::DISABLED,
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq, Serialize, Deserialize, Clone, Copy)]
+pub enum Permission {
+ ReadOnly,
+ ReadWrite,
+}
+
+impl TryFrom<EnumOrUnknown<ProtoFlagPermission>> for Permission {
+ type Error = Error;
+
+ fn try_from(proto: EnumOrUnknown<ProtoFlagPermission>) -> Result<Self, Self::Error> {
+ match ProtoFlagPermission::from_i32(proto.value()) {
+ Some(ProtoFlagPermission::READ_ONLY) => Ok(Permission::ReadOnly),
+ Some(ProtoFlagPermission::READ_WRITE) => Ok(Permission::ReadWrite),
+ None => Err(anyhow!("unknown permission enum value {}", proto.value())),
+ }
+ }
+}
+
+impl From<Permission> for ProtoFlagPermission {
+ fn from(permission: Permission) -> Self {
+ match permission {
+ Permission::ReadOnly => ProtoFlagPermission::READ_ONLY,
+ Permission::ReadWrite => ProtoFlagPermission::READ_WRITE,
+ }
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct FlagDeclaration {
+ pub name: String,
+ pub description: String,
+}
+
+impl FlagDeclaration {
+ #[allow(dead_code)] // only used in unit tests
+ pub fn try_from_text_proto(text_proto: &str) -> Result<FlagDeclaration> {
+ let proto: ProtoFlagDeclaration = crate::protos::try_from_text_proto(text_proto)
+ .with_context(|| text_proto.to_owned())?;
+ proto.try_into()
+ }
+}
+
+impl TryFrom<ProtoFlagDeclaration> for FlagDeclaration {
+ type Error = Error;
+
+ fn try_from(proto: ProtoFlagDeclaration) -> Result<Self, Self::Error> {
+ let Some(name) = proto.name else {
+ bail!("missing 'name' field");
+ };
+ let Some(description) = proto.description else {
+ bail!("missing 'description' field");
+ };
+ Ok(FlagDeclaration { name, description })
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct FlagDeclarations {
+ pub namespace: String,
+ pub flags: Vec<FlagDeclaration>,
+}
+
+impl FlagDeclarations {
+ pub fn try_from_text_proto(text_proto: &str) -> Result<FlagDeclarations> {
+ let proto: ProtoFlagDeclarations = crate::protos::try_from_text_proto(text_proto)
+ .with_context(|| text_proto.to_owned())?;
+ let Some(namespace) = proto.namespace else {
+ bail!("missing 'namespace' field");
+ };
+ let mut flags = vec![];
+ for proto_flag in proto.flag.into_iter() {
+ flags.push(proto_flag.try_into()?);
+ }
+ Ok(FlagDeclarations { namespace, flags })
+ }
+}
+
+#[derive(Debug, PartialEq, Eq)]
+pub struct FlagValue {
+ pub namespace: String,
+ pub name: String,
+ pub state: FlagState,
+ pub permission: Permission,
+}
+
+impl FlagValue {
+ #[allow(dead_code)] // only used in unit tests
+ pub fn try_from_text_proto(text_proto: &str) -> Result<FlagValue> {
+ let proto: ProtoFlagValue = crate::protos::try_from_text_proto(text_proto)?;
+ proto.try_into()
+ }
+
+ pub fn try_from_text_proto_list(text_proto: &str) -> Result<Vec<FlagValue>> {
+ let proto: ProtoFlagValues = crate::protos::try_from_text_proto(text_proto)?;
+ proto.flag_value.into_iter().map(|proto_flag| proto_flag.try_into()).collect()
+ }
+}
+
+impl TryFrom<ProtoFlagValue> for FlagValue {
+ type Error = Error;
+
+ fn try_from(proto: ProtoFlagValue) -> Result<Self, Self::Error> {
+ let Some(namespace) = proto.namespace else {
+ bail!("missing 'namespace' field");
+ };
+ let Some(name) = proto.name else {
+ bail!("missing 'name' field");
+ };
+ let Some(proto_state) = proto.state else {
+ bail!("missing 'state' field");
+ };
+ let state = proto_state.try_into()?;
+ let Some(proto_permission) = proto.permission else {
+ bail!("missing 'permission' field");
+ };
+ let permission = proto_permission.try_into()?;
+ Ok(FlagValue { namespace, name, state, permission })
+ }
+}
+
+impl From<Cache> for ProtoParsedFlags {
+ fn from(cache: Cache) -> Self {
+ let mut proto = ProtoParsedFlags::new();
+ for item in cache.into_iter() {
+ proto.parsed_flag.push(item.into());
+ }
+ proto
+ }
+}
+
+impl From<Item> for ProtoParsedFlag {
+ fn from(item: Item) -> Self {
+ let mut proto = crate::protos::ProtoParsedFlag::new();
+ proto.set_namespace(item.namespace.to_owned());
+ proto.set_name(item.name.clone());
+ proto.set_description(item.description.clone());
+ proto.set_state(item.state.into());
+ proto.set_permission(item.permission.into());
+ for trace in item.trace.into_iter() {
+ proto.trace.push(trace.into());
+ }
+ proto
+ }
+}
+
+impl From<Tracepoint> for ProtoTracepoint {
+ fn from(tracepoint: Tracepoint) -> Self {
+ let mut proto = ProtoTracepoint::new();
+ proto.set_source(format!("{}", tracepoint.source));
+ proto.set_state(tracepoint.state.into());
+ proto.set_permission(tracepoint.permission.into());
+ proto
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_flag_try_from_text_proto() {
+ let expected = FlagDeclaration {
+ name: "1234".to_owned(),
+ description: "Description of the flag".to_owned(),
+ };
+
+ let s = r#"
+ name: "1234"
+ description: "Description of the flag"
+ "#;
+ let actual = FlagDeclaration::try_from_text_proto(s).unwrap();
+
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn test_flag_try_from_text_proto_bad_input() {
+ let s = r#"
+ name: "a"
+ "#;
+ let error = FlagDeclaration::try_from_text_proto(s).unwrap_err();
+ assert!(format!("{:?}", error).contains("Message not initialized"));
+
+ let s = r#"
+ description: "Description of the flag"
+ "#;
+ let error = FlagDeclaration::try_from_text_proto(s).unwrap_err();
+ assert!(format!("{:?}", error).contains("Message not initialized"));
+ }
+
+ #[test]
+ fn test_namespace_try_from_text_proto() {
+ let expected = FlagDeclarations {
+ namespace: "ns".to_owned(),
+ flags: vec![
+ FlagDeclaration { name: "a".to_owned(), description: "A".to_owned() },
+ FlagDeclaration { name: "b".to_owned(), description: "B".to_owned() },
+ ],
+ };
+
+ let s = r#"
+ namespace: "ns"
+ flag {
+ name: "a"
+ description: "A"
+ }
+ flag {
+ name: "b"
+ description: "B"
+ }
+ "#;
+ let actual = FlagDeclarations::try_from_text_proto(s).unwrap();
+
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn test_flag_value_try_from_text_proto() {
+ let expected = FlagValue {
+ namespace: "ns".to_owned(),
+ name: "1234".to_owned(),
+ state: FlagState::Enabled,
+ permission: Permission::ReadOnly,
+ };
+
+ let s = r#"
+ namespace: "ns"
+ name: "1234"
+ state: ENABLED
+ permission: READ_ONLY
+ "#;
+ let actual = FlagValue::try_from_text_proto(s).unwrap();
+
+ assert_eq!(expected, actual);
+ }
+}
diff --git a/tools/aconfig/src/cache.rs b/tools/aconfig/src/cache.rs
new file mode 100644
index 0000000..c546f7b
--- /dev/null
+++ b/tools/aconfig/src/cache.rs
@@ -0,0 +1,296 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::{bail, ensure, Result};
+use serde::{Deserialize, Serialize};
+use std::io::{Read, Write};
+
+use crate::aconfig::{FlagDeclaration, FlagState, FlagValue, Permission};
+use crate::commands::Source;
+
+const DEFAULT_FLAG_STATE: FlagState = FlagState::Disabled;
+const DEFAULT_FLAG_PERMISSION: Permission = Permission::ReadWrite;
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct Tracepoint {
+ pub source: Source,
+ pub state: FlagState,
+ pub permission: Permission,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct Item {
+ // TODO: duplicating the Cache.namespace as Item.namespace makes the internal representation
+ // closer to the proto message `parsed_flag`; hopefully this will enable us to replace the Item
+ // struct and use a newtype instead once aconfig has matured. Until then, namespace should
+ // really be a Cow<String>.
+ pub namespace: String,
+ pub name: String,
+ pub description: String,
+ pub state: FlagState,
+ pub permission: Permission,
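+ // Every declaration and value assignment that touched this flag, in order.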
+ pub trace: Vec<Tracepoint>,
+}
+
+#[derive(Serialize, Deserialize, Debug)]
+pub struct Cache {
+ namespace: String,
+ items: Vec<Item>,
+}
+
+impl Cache {
+ pub fn new(namespace: String) -> Result<Cache> {
+ ensure!(!namespace.is_empty(), "empty namespace");
+ Ok(Cache { namespace, items: vec![] })
+ }
+
+ pub fn read_from_reader(reader: impl Read) -> Result<Cache> {
+ serde_json::from_reader(reader).map_err(|e| e.into())
+ }
+
+ pub fn write_to_writer(&self, writer: impl Write) -> Result<()> {
+ serde_json::to_writer(writer, self).map_err(|e| e.into())
+ }
+
+ pub fn add_flag_declaration(
+ &mut self,
+ source: Source,
+ declaration: FlagDeclaration,
+ ) -> Result<()> {
+ ensure!(!declaration.name.is_empty(), "empty flag name");
+ ensure!(!declaration.description.is_empty(), "empty flag description");
+ ensure!(
+ self.items.iter().all(|item| item.name != declaration.name),
+ "failed to declare flag {} from {}: flag already declared",
+ declaration.name,
+ source
+ );
+ self.items.push(Item {
+ namespace: self.namespace.clone(),
+ name: declaration.name.clone(),
+ description: declaration.description,
+ state: DEFAULT_FLAG_STATE,
+ permission: DEFAULT_FLAG_PERMISSION,
+ trace: vec![Tracepoint {
+ source,
+ state: DEFAULT_FLAG_STATE,
+ permission: DEFAULT_FLAG_PERMISSION,
+ }],
+ });
+ Ok(())
+ }
+
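+ // Overwrite the flag's state and permission and append a tracepoint recording
+ // where the value came from; fails if the flag was never declared or belongs
+ // to a different namespace.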
+ pub fn add_flag_value(&mut self, source: Source, value: FlagValue) -> Result<()> {
+ ensure!(!value.namespace.is_empty(), "empty flag namespace");
+ ensure!(!value.name.is_empty(), "empty flag name");
+ ensure!(
+ value.namespace == self.namespace,
+ "failed to set values for flag {}/{} from {}: expected namespace {}",
+ value.namespace,
+ value.name,
+ source,
+ self.namespace
+ );
+ let Some(existing_item) = self.items.iter_mut().find(|item| item.name == value.name) else {
+ bail!("failed to set values for flag {}/{} from {}: flag not declared", value.namespace, value.name, source);
+ };
+ existing_item.state = value.state;
+ existing_item.permission = value.permission;
+ existing_item.trace.push(Tracepoint {
+ source,
+ state: value.state,
+ permission: value.permission,
+ });
+ Ok(())
+ }
+
+ pub fn iter(&self) -> impl Iterator<Item = &Item> {
+ self.items.iter()
+ }
+
+ pub fn into_iter(self) -> impl Iterator<Item = Item> {
+ self.items.into_iter()
+ }
+
+ pub fn namespace(&self) -> &str {
+ debug_assert!(!self.namespace.is_empty());
+ &self.namespace
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::aconfig::{FlagState, Permission};
+
+ #[test]
+ fn test_add_flag_declaration() {
+ let mut cache = Cache::new("ns".to_string()).unwrap();
+ cache
+ .add_flag_declaration(
+ Source::File("first.txt".to_string()),
+ FlagDeclaration { name: "foo".to_string(), description: "desc".to_string() },
+ )
+ .unwrap();
+ let error = cache
+ .add_flag_declaration(
+ Source::File("second.txt".to_string()),
+ FlagDeclaration { name: "foo".to_string(), description: "desc".to_string() },
+ )
+ .unwrap_err();
+ assert_eq!(
+ &format!("{:?}", error),
+ "failed to declare flag foo from second.txt: flag already declared"
+ );
+ }
+
+ #[test]
+ fn test_add_flag_value() {
+ fn check(cache: &Cache, name: &str, expected: (FlagState, Permission)) -> bool {
+ let item = cache.iter().find(|&item| item.name == name).unwrap();
+ item.state == expected.0 && item.permission == expected.1
+ }
+
+ let mut cache = Cache::new("ns".to_string()).unwrap();
+ let error = cache
+ .add_flag_value(
+ Source::Memory,
+ FlagValue {
+ namespace: "ns".to_string(),
+ name: "foo".to_string(),
+ state: FlagState::Enabled,
+ permission: Permission::ReadOnly,
+ },
+ )
+ .unwrap_err();
+ assert_eq!(
+ &format!("{:?}", error),
+ "failed to set values for flag ns/foo from <memory>: flag not declared"
+ );
+
+ cache
+ .add_flag_declaration(
+ Source::File("first.txt".to_string()),
+ FlagDeclaration { name: "foo".to_string(), description: "desc".to_string() },
+ )
+ .unwrap();
+ assert!(check(&cache, "foo", (DEFAULT_FLAG_STATE, DEFAULT_FLAG_PERMISSION)));
+
+ cache
+ .add_flag_value(
+ Source::Memory,
+ FlagValue {
+ namespace: "ns".to_string(),
+ name: "foo".to_string(),
+ state: FlagState::Disabled,
+ permission: Permission::ReadOnly,
+ },
+ )
+ .unwrap();
+ assert!(check(&cache, "foo", (FlagState::Disabled, Permission::ReadOnly)));
+
+ cache
+ .add_flag_value(
+ Source::Memory,
+ FlagValue {
+ namespace: "ns".to_string(),
+ name: "foo".to_string(),
+ state: FlagState::Enabled,
+ permission: Permission::ReadWrite,
+ },
+ )
+ .unwrap();
+ assert!(check(&cache, "foo", (FlagState::Enabled, Permission::ReadWrite)));
+
+ // different namespace -> error; cache left unchanged
+ let error = cache
+ .add_flag_value(
+ Source::Memory,
+ FlagValue {
+ namespace: "some-other-namespace".to_string(),
+ name: "foo".to_string(),
+ state: FlagState::Enabled,
+ permission: Permission::ReadOnly,
+ },
+ )
+ .unwrap_err();
+ assert_eq!(&format!("{:?}", error), "failed to set values for flag some-other-namespace/foo from <memory>: expected namespace ns");
+ assert!(check(&cache, "foo", (FlagState::Enabled, Permission::ReadWrite)));
+ }
+
+ #[test]
+ fn test_reject_empty_cache_namespace() {
+ Cache::new("".to_string()).unwrap_err();
+ }
+
+ #[test]
+ fn test_reject_empty_flag_declaration_fields() {
+ let mut cache = Cache::new("ns".to_string()).unwrap();
+
+ let error = cache
+ .add_flag_declaration(
+ Source::Memory,
+ FlagDeclaration { name: "".to_string(), description: "Description".to_string() },
+ )
+ .unwrap_err();
+ assert_eq!(&format!("{:?}", error), "empty flag name");
+
+ let error = cache
+ .add_flag_declaration(
+ Source::Memory,
+ FlagDeclaration { name: "foo".to_string(), description: "".to_string() },
+ )
+ .unwrap_err();
+ assert_eq!(&format!("{:?}", error), "empty flag description");
+ }
+
+ #[test]
+ fn test_reject_empty_flag_value_files() {
+ let mut cache = Cache::new("ns".to_string()).unwrap();
+ cache
+ .add_flag_declaration(
+ Source::Memory,
+ FlagDeclaration { name: "foo".to_string(), description: "desc".to_string() },
+ )
+ .unwrap();
+
+ let error = cache
+ .add_flag_value(
+ Source::Memory,
+ FlagValue {
+ namespace: "".to_string(),
+ name: "foo".to_string(),
+ state: FlagState::Enabled,
+ permission: Permission::ReadOnly,
+ },
+ )
+ .unwrap_err();
+ assert_eq!(&format!("{:?}", error), "empty flag namespace");
+
+ let error = cache
+ .add_flag_value(
+ Source::Memory,
+ FlagValue {
+ namespace: "ns".to_string(),
+ name: "".to_string(),
+ state: FlagState::Enabled,
+ permission: Permission::ReadOnly,
+ },
+ )
+ .unwrap_err();
+ assert_eq!(&format!("{:?}", error), "empty flag name");
+ }
+}
diff --git a/tools/aconfig/src/codegen_cpp.rs b/tools/aconfig/src/codegen_cpp.rs
new file mode 100644
index 0000000..cb266f1
--- /dev/null
+++ b/tools/aconfig/src/codegen_cpp.rs
@@ -0,0 +1,216 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::Result;
+use serde::Serialize;
+use tinytemplate::TinyTemplate;
+
+use crate::aconfig::{FlagState, Permission};
+use crate::cache::{Cache, Item};
+use crate::commands::OutputFile;
+
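+// Render the C++ header for a namespace; the output path is
+// aconfig/<namespace>.h, with the namespace lowercased below.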
+pub fn generate_cpp_code(cache: &Cache) -> Result<OutputFile> {
+ let class_elements: Vec<ClassElement> = cache.iter().map(create_class_element).collect();
+ let readwrite = class_elements.iter().any(|item| item.readwrite);
+ let namespace = cache.namespace().to_lowercase();
+ let context = Context { namespace: namespace.clone(), readwrite, class_elements };
+ let mut template = TinyTemplate::new();
+ template.add_template("cpp_code_gen", include_str!("../templates/cpp.template"))?;
+ let contents = template.render("cpp_code_gen", &context)?;
+ let path = ["aconfig", &(namespace + ".h")].iter().collect();
+ Ok(OutputFile { contents: contents.into(), path })
+}
+
+#[derive(Serialize)]
+struct Context {
+ pub namespace: String,
+ pub readwrite: bool,
+ pub class_elements: Vec<ClassElement>,
+}
+
+#[derive(Serialize)]
+struct ClassElement {
+ pub readwrite: bool,
+ pub default_value: String,
+ pub flag_name: String,
+}
+
+fn create_class_element(item: &Item) -> ClassElement {
+ ClassElement {
+ readwrite: item.permission == Permission::ReadWrite,
+ default_value: if item.state == FlagState::Enabled {
+ "true".to_string()
+ } else {
+ "false".to_string()
+ },
+ flag_name: item.name.clone(),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::aconfig::{FlagDeclaration, FlagState, FlagValue, Permission};
+ use crate::commands::Source;
+
+ #[test]
+ fn test_cpp_codegen_build_time_flag_only() {
+ let namespace = "my_namespace";
+ let mut cache = Cache::new(namespace.to_string()).unwrap();
+ cache
+ .add_flag_declaration(
+ Source::File("aconfig_one.txt".to_string()),
+ FlagDeclaration {
+ name: "my_flag_one".to_string(),
+ description: "buildtime disable".to_string(),
+ },
+ )
+ .unwrap();
+ cache
+ .add_flag_value(
+ Source::Memory,
+ FlagValue {
+ namespace: namespace.to_string(),
+ name: "my_flag_one".to_string(),
+ state: FlagState::Disabled,
+ permission: Permission::ReadOnly,
+ },
+ )
+ .unwrap();
+ cache
+ .add_flag_declaration(
+ Source::File("aconfig_two.txt".to_string()),
+ FlagDeclaration {
+ name: "my_flag_two".to_string(),
+ description: "buildtime enable".to_string(),
+ },
+ )
+ .unwrap();
+ cache
+ .add_flag_value(
+ Source::Memory,
+ FlagValue {
+ namespace: namespace.to_string(),
+ name: "my_flag_two".to_string(),
+ state: FlagState::Enabled,
+ permission: Permission::ReadOnly,
+ },
+ )
+ .unwrap();
+ let expect_content = r#"#ifndef my_namespace_HEADER_H
+ #define my_namespace_HEADER_H
+ #include "my_namespace.h"
+
+ namespace my_namespace {
+
+ class my_flag_one {
+ public:
+ virtual const bool value() {
+ return false;
+ }
+ };
+
+ class my_flag_two {
+ public:
+ virtual const bool value() {
+ return true;
+ }
+ };
+
+ }
+ #endif
+ "#;
+ let file = generate_cpp_code(&cache).unwrap();
+ assert_eq!("aconfig/my_namespace.h", file.path.to_str().unwrap());
+ assert_eq!(
+ expect_content.replace(' ', ""),
+ String::from_utf8(file.contents).unwrap().replace(' ', "")
+ );
+ }
+
+ #[test]
+ fn test_cpp_codegen_runtime_flag() {
+ let namespace = "my_namespace";
+ let mut cache = Cache::new(namespace.to_string()).unwrap();
+ cache
+ .add_flag_declaration(
+ Source::File("aconfig_one.txt".to_string()),
+ FlagDeclaration {
+ name: "my_flag_one".to_string(),
+ description: "buildtime disable".to_string(),
+ },
+ )
+ .unwrap();
+ cache
+ .add_flag_declaration(
+ Source::File("aconfig_two.txt".to_string()),
+ FlagDeclaration {
+ name: "my_flag_two".to_string(),
+ description: "runtime enable".to_string(),
+ },
+ )
+ .unwrap();
+ cache
+ .add_flag_value(
+ Source::Memory,
+ FlagValue {
+ namespace: namespace.to_string(),
+ name: "my_flag_two".to_string(),
+ state: FlagState::Enabled,
+ permission: Permission::ReadWrite,
+ },
+ )
+ .unwrap();
+ let expect_content = r#"#ifndef my_namespace_HEADER_H
+ #define my_namespace_HEADER_H
+ #include "my_namespace.h"
+
+ #include <server_configurable_flags/get_flags.h>
+ using namespace server_configurable_flags;
+
+ namespace my_namespace {
+
+ class my_flag_one {
+ public:
+ virtual const bool value() {
+ return GetServerConfigurableFlag(
+ "my_namespace",
+ "my_flag_one",
+ "false") == "true";
+ }
+ };
+
+ class my_flag_two {
+ public:
+ virtual const bool value() {
+ return GetServerConfigurableFlag(
+ "my_namespace",
+ "my_flag_two",
+ "true") == "true";
+ }
+ };
+
+ }
+ #endif
+ "#;
+ let file = generate_cpp_code(&cache).unwrap();
+ assert_eq!("aconfig/my_namespace.h", file.path.to_str().unwrap());
+ assert_eq!(
+ expect_content.replace(' ', ""),
+ String::from_utf8(file.contents).unwrap().replace(' ', "")
+ );
+ }
+}
diff --git a/tools/aconfig/src/codegen_java.rs b/tools/aconfig/src/codegen_java.rs
new file mode 100644
index 0000000..476a89d
--- /dev/null
+++ b/tools/aconfig/src/codegen_java.rs
@@ -0,0 +1,136 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::Result;
+use serde::Serialize;
+use std::path::PathBuf;
+use tinytemplate::TinyTemplate;
+
+use crate::aconfig::{FlagState, Permission};
+use crate::cache::{Cache, Item};
+use crate::commands::OutputFile;
+
+pub fn generate_java_code(cache: &Cache) -> Result<OutputFile> {
+ let class_elements: Vec<ClassElement> = cache.iter().map(create_class_element).collect();
+ let readwrite = class_elements.iter().any(|item| item.readwrite);
+ let namespace = cache.namespace();
+ let context = Context { namespace: namespace.to_string(), readwrite, class_elements };
+ let mut template = TinyTemplate::new();
+ template.add_template("java_code_gen", include_str!("../templates/java.template"))?;
+ let contents = template.render("java_code_gen", &context)?;
+ let mut path: PathBuf = namespace.split('.').collect();
+ // TODO: Allow customization of the java class name
+ path.push("Flags.java");
+ Ok(OutputFile { contents: contents.into(), path })
+}
+
+#[derive(Serialize)]
+struct Context {
+ pub namespace: String,
+ pub readwrite: bool,
+ pub class_elements: Vec<ClassElement>,
+}
+
+#[derive(Serialize)]
+struct ClassElement {
+ pub method_name: String,
+ pub readwrite: bool,
+ pub default_value: String,
+ pub feature_name: String,
+ pub flag_name: String,
+}
+
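+// Map a cache Item onto the template fields. feature_name and flag_name are
+// currently both the flag's name, so the DeviceConfig key rendered by
+// java.template is "<name>__<name>" (cf. "test2__test2" in the test below).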
+fn create_class_element(item: &Item) -> ClassElement {
+ ClassElement {
+ method_name: item.name.clone(),
+ readwrite: item.permission == Permission::ReadWrite,
+ default_value: if item.state == FlagState::Enabled {
+ "true".to_string()
+ } else {
+ "false".to_string()
+ },
+ feature_name: item.name.clone(),
+ flag_name: item.name.clone(),
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::aconfig::{FlagDeclaration, FlagValue};
+ use crate::commands::Source;
+
+ #[test]
+ fn test_generate_java_code() {
+ let namespace = "com.example";
+ let mut cache = Cache::new(namespace.to_string()).unwrap();
+ cache
+ .add_flag_declaration(
+ Source::File("test.txt".to_string()),
+ FlagDeclaration {
+ name: "test".to_string(),
+ description: "buildtime enable".to_string(),
+ },
+ )
+ .unwrap();
+ cache
+ .add_flag_declaration(
+ Source::File("test2.txt".to_string()),
+ FlagDeclaration {
+ name: "test2".to_string(),
+ description: "runtime disable".to_string(),
+ },
+ )
+ .unwrap();
+ cache
+ .add_flag_value(
+ Source::Memory,
+ FlagValue {
+ namespace: namespace.to_string(),
+ name: "test".to_string(),
+ state: FlagState::Disabled,
+ permission: Permission::ReadOnly,
+ },
+ )
+ .unwrap();
+ let expect_content = r#"package com.example;
+
+ import android.provider.DeviceConfig;
+
+ public final class Flags {
+
+ public static boolean test() {
+ return false;
+ }
+
+ public static boolean test2() {
+ return DeviceConfig.getBoolean(
+ "com.example",
+ "test2__test2",
+ false
+ );
+ }
+
+ }
+ "#;
+ let file = generate_java_code(&cache).unwrap();
+ assert_eq!("com/example/Flags.java", file.path.to_str().unwrap());
+ assert_eq!(
+ expect_content.replace(' ', ""),
+ String::from_utf8(file.contents).unwrap().replace(' ', "")
+ );
+ }
+}
diff --git a/tools/aconfig/src/commands.rs b/tools/aconfig/src/commands.rs
new file mode 100644
index 0000000..0bdb0b5
--- /dev/null
+++ b/tools/aconfig/src/commands.rs
@@ -0,0 +1,208 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+use anyhow::{ensure, Context, Result};
+use clap::ValueEnum;
+use protobuf::Message;
+use serde::{Deserialize, Serialize};
+use std::fmt;
+use std::io::Read;
+use std::path::PathBuf;
+
+use crate::aconfig::{FlagDeclarations, FlagValue};
+use crate::cache::Cache;
+use crate::codegen_cpp::generate_cpp_code;
+use crate::codegen_java::generate_java_code;
+use crate::protos::ProtoParsedFlags;
+
+#[derive(Serialize, Deserialize, Clone, Debug)]
+pub enum Source {
+ #[allow(dead_code)] // only used in unit tests
+ Memory,
+ File(String),
+}
+
+impl fmt::Display for Source {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ match self {
+ Self::Memory => write!(f, "<memory>"),
+ Self::File(path) => write!(f, "{}", path),
+ }
+ }
+}
+
+pub struct Input {
+ pub source: Source,
+ pub reader: Box<dyn Read>,
+}
+
+pub struct OutputFile {
+ pub path: PathBuf, // relative to some root directory only main knows about
+ pub contents: Vec<u8>,
+}
+
+pub fn create_cache(
+ namespace: &str,
+ declarations: Vec<Input>,
+ values: Vec<Input>,
+) -> Result<Cache> {
+ let mut cache = Cache::new(namespace.to_owned())?;
+
+ for mut input in declarations {
+ let mut contents = String::new();
+ input.reader.read_to_string(&mut contents)?;
+ let dec_list = FlagDeclarations::try_from_text_proto(&contents)
+ .with_context(|| format!("Failed to parse {}", input.source))?;
+ ensure!(
+ namespace == dec_list.namespace,
+ "Failed to parse {}: expected namespace {}, got {}",
+ input.source,
+ namespace,
+ dec_list.namespace
+ );
+ for d in dec_list.flags.into_iter() {
+ cache.add_flag_declaration(input.source.clone(), d)?;
+ }
+ }
+
+ for mut input in values {
+ let mut contents = String::new();
+ input.reader.read_to_string(&mut contents)?;
+ let values_list = FlagValue::try_from_text_proto_list(&contents)
+ .with_context(|| format!("Failed to parse {}", input.source))?;
+ for v in values_list {
+ // TODO: warn about flag values that do not take effect?
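+ // (add_flag_value errors, e.g. an undeclared flag or a mismatched namespace,
+ // are deliberately discarded via `let _`.)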
+ let _ = cache.add_flag_value(input.source.clone(), v);
+ }
+ }
+
+ Ok(cache)
+}
+
+pub fn create_java_lib(cache: &Cache) -> Result<OutputFile> {
+ generate_java_code(cache)
+}
+
+pub fn create_cpp_lib(cache: &Cache) -> Result<OutputFile> {
+ generate_cpp_code(cache)
+}
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)]
+pub enum DumpFormat {
+ Text,
+ Debug,
+ Protobuf,
+}
+
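+// Serialize the cache in the requested format: Text emits one "<name>: <state>"
+// line per flag (e.g. "a: Disabled"), Debug emits each Item's Debug
+// representation, and Protobuf emits the binary ProtoParsedFlags encoding.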
+pub fn dump_cache(cache: Cache, format: DumpFormat) -> Result<Vec<u8>> {
+ match format {
+ DumpFormat::Text => {
+ let mut lines = vec![];
+ for item in cache.iter() {
+ lines.push(format!("{}: {:?}\n", item.name, item.state));
+ }
+ Ok(lines.concat().into())
+ }
+ DumpFormat::Debug => {
+ let mut lines = vec![];
+ for item in cache.iter() {
+ lines.push(format!("{:?}\n", item));
+ }
+ Ok(lines.concat().into())
+ }
+ DumpFormat::Protobuf => {
+ let parsed_flags: ProtoParsedFlags = cache.into();
+ let mut output = vec![];
+ parsed_flags.write_to_vec(&mut output)?;
+ Ok(output)
+ }
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+ use crate::aconfig::{FlagState, Permission};
+
+ fn create_test_cache() -> Cache {
+ let s = r#"
+ namespace: "ns"
+ flag {
+ name: "a"
+ description: "Description of a"
+ }
+ flag {
+ name: "b"
+ description: "Description of b"
+ }
+ "#;
+ let declarations = vec![Input { source: Source::Memory, reader: Box::new(s.as_bytes()) }];
+ let o = r#"
+ flag_value {
+ namespace: "ns"
+ name: "a"
+ state: DISABLED
+ permission: READ_ONLY
+ }
+ "#;
+ let values = vec![Input { source: Source::Memory, reader: Box::new(o.as_bytes()) }];
+ create_cache("ns", declarations, values).unwrap()
+ }
+
+ #[test]
+ fn test_create_cache() {
+ let cache = create_test_cache(); // calls create_cache
+ let item = cache.iter().find(|&item| item.name == "a").unwrap();
+ assert_eq!(FlagState::Disabled, item.state);
+ assert_eq!(Permission::ReadOnly, item.permission);
+ }
+
+ #[test]
+ fn test_dump_text_format() {
+ let cache = create_test_cache();
+ let bytes = dump_cache(cache, DumpFormat::Text).unwrap();
+ let text = std::str::from_utf8(&bytes).unwrap();
+ assert!(text.contains("a: Disabled"));
+ }
+
+ #[test]
+ fn test_dump_protobuf_format() {
+ use crate::protos::{ProtoFlagPermission, ProtoFlagState, ProtoTracepoint};
+ use protobuf::Message;
+
+ let cache = create_test_cache();
+ let bytes = dump_cache(cache, DumpFormat::Protobuf).unwrap();
+ let actual = ProtoParsedFlags::parse_from_bytes(&bytes).unwrap();
+
+ assert_eq!(
+ vec!["a".to_string(), "b".to_string()],
+ actual.parsed_flag.iter().map(|item| item.name.clone().unwrap()).collect::<Vec<_>>()
+ );
+
+ let item =
+ actual.parsed_flag.iter().find(|item| item.name == Some("b".to_string())).unwrap();
+ assert_eq!(item.namespace(), "ns");
+ assert_eq!(item.name(), "b");
+ assert_eq!(item.description(), "Description of b");
+ assert_eq!(item.state(), ProtoFlagState::DISABLED);
+ assert_eq!(item.permission(), ProtoFlagPermission::READ_WRITE);
+ let mut tp = ProtoTracepoint::new();
+ tp.set_source("<memory>".to_string());
+ tp.set_state(ProtoFlagState::DISABLED);
+ tp.set_permission(ProtoFlagPermission::READ_WRITE);
+ assert_eq!(item.trace, vec![tp]);
+ }
+}
diff --git a/tools/aconfig/src/main.rs b/tools/aconfig/src/main.rs
new file mode 100644
index 0000000..6db5948
--- /dev/null
+++ b/tools/aconfig/src/main.rs
@@ -0,0 +1,149 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//! `aconfig` is a build-time tool to manage build-time configurations, such as feature flags.
+
+use anyhow::{anyhow, ensure, Result};
+use clap::{builder::ArgAction, builder::EnumValueParser, Arg, ArgMatches, Command};
+use core::any::Any;
+use std::fs;
+use std::io;
+use std::io::Write;
+use std::path::{Path, PathBuf};
+
+mod aconfig;
+mod cache;
+mod codegen_cpp;
+mod codegen_java;
+mod commands;
+mod protos;
+
+use crate::cache::Cache;
+use commands::{DumpFormat, Input, OutputFile, Source};
+
+fn cli() -> Command {
+ Command::new("aconfig")
+ .subcommand_required(true)
+ .subcommand(
+ Command::new("create-cache")
+ .arg(Arg::new("namespace").long("namespace").required(true))
+ .arg(Arg::new("declarations").long("declarations").action(ArgAction::Append))
+ .arg(Arg::new("values").long("values").action(ArgAction::Append))
+ .arg(Arg::new("cache").long("cache").required(true)),
+ )
+ .subcommand(
+ Command::new("create-java-lib")
+ .arg(Arg::new("cache").long("cache").required(true))
+ .arg(Arg::new("out").long("out").required(true)),
+ )
+ .subcommand(
+ Command::new("create-cpp-lib")
+ .arg(Arg::new("cache").long("cache").required(true))
+ .arg(Arg::new("out").long("out").required(true)),
+ )
+ .subcommand(
+ Command::new("dump")
+ .arg(Arg::new("cache").long("cache").required(true))
+ .arg(
+ Arg::new("format")
+ .long("format")
+ .value_parser(EnumValueParser::<commands::DumpFormat>::new())
+ .default_value("text"),
+ )
+ .arg(Arg::new("out").long("out").default_value("-")),
+ )
+}
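+
+// Illustrative invocation (file names are hypothetical):
+//
+//   aconfig create-cache --namespace ns \
+//       --declarations flags.textproto --values values.textproto \
+//       --cache ns.cache.json
+//   aconfig dump --cache ns.cache.json --format protobuf --out flags.pb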
+
+fn get_required_arg<'a, T>(matches: &'a ArgMatches, arg_name: &str) -> Result<&'a T>
+where
+ T: Any + Clone + Send + Sync + 'static,
+{
+ matches
+ .get_one::<T>(arg_name)
+ .ok_or(anyhow!("internal error: required argument '{}' not found", arg_name))
+}
+
+fn open_zero_or_more_files(matches: &ArgMatches, arg_name: &str) -> Result<Vec<Input>> {
+ let mut opened_files = vec![];
+ for path in matches.get_many::<String>(arg_name).unwrap_or_default() {
+ let file = Box::new(fs::File::open(path)?);
+ opened_files.push(Input { source: Source::File(path.to_string()), reader: file });
+ }
+ Ok(opened_files)
+}
+
+fn write_output_file_relative_to_dir(root: &Path, output_file: &OutputFile) -> Result<()> {
+ ensure!(
+ root.is_dir(),
+ "output directory {} does not exist or is not a directory",
+ root.display()
+ );
+ let path = root.join(output_file.path.clone());
+ let parent = path
+ .parent()
+ .ok_or(anyhow!("unable to locate parent of output file {}", path.display()))?;
+ fs::create_dir_all(parent)?;
+ let mut file = fs::File::create(path)?;
+ file.write_all(&output_file.contents)?;
+ Ok(())
+}
+
+fn main() -> Result<()> {
+ let matches = cli().get_matches();
+ match matches.subcommand() {
+ Some(("create-cache", sub_matches)) => {
+ let namespace = get_required_arg::<String>(sub_matches, "namespace")?;
+ let declarations = open_zero_or_more_files(sub_matches, "declarations")?;
+ let values = open_zero_or_more_files(sub_matches, "values")?;
+ let cache = commands::create_cache(namespace, declarations, values)?;
+ let path = get_required_arg::<String>(sub_matches, "cache")?;
+ let file = fs::File::create(path)?;
+ cache.write_to_writer(file)?;
+ }
+ Some(("create-java-lib", sub_matches)) => {
+ let path = get_required_arg::<String>(sub_matches, "cache")?;
+ let file = fs::File::open(path)?;
+ let cache = Cache::read_from_reader(file)?;
+ let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
+ let generated_file = commands::create_java_lib(&cache)?;
+ write_output_file_relative_to_dir(&dir, &generated_file)?;
+ }
+ Some(("create-cpp-lib", sub_matches)) => {
+ let path = get_required_arg::<String>(sub_matches, "cache")?;
+ let file = fs::File::open(path)?;
+ let cache = Cache::read_from_reader(file)?;
+ let dir = PathBuf::from(get_required_arg::<String>(sub_matches, "out")?);
+ let generated_file = commands::create_cpp_lib(&cache)?;
+ write_output_file_relative_to_dir(&dir, &generated_file)?;
+ }
+ Some(("dump", sub_matches)) => {
+ let path = get_required_arg::<String>(sub_matches, "cache")?;
+ let file = fs::File::open(path)?;
+ let cache = Cache::read_from_reader(file)?;
+ let format = get_required_arg::<DumpFormat>(sub_matches, "format")?;
+ let output = commands::dump_cache(cache, *format)?;
+ let path = get_required_arg::<String>(sub_matches, "out")?;
+ let mut file: Box<dyn Write> = if *path == "-" {
+ Box::new(io::stdout())
+ } else {
+ Box::new(fs::File::create(path)?)
+ };
+ file.write_all(&output)?;
+ }
+ _ => unreachable!(),
+ }
+ Ok(())
+}
diff --git a/tools/aconfig/src/protos.rs b/tools/aconfig/src/protos.rs
new file mode 100644
index 0000000..cb75692
--- /dev/null
+++ b/tools/aconfig/src/protos.rs
@@ -0,0 +1,97 @@
+/*
+ * Copyright (C) 2023 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// When building with the Android tool-chain
+//
+// - an external crate `aconfig_protos` will be generated
+// - the feature "cargo" will be disabled
+//
+// When building with cargo
+//
+// - a local sub-module will be generated in OUT_DIR and included in this file
+// - the feature "cargo" will be enabled
+//
+// This module hides these differences from the rest of aconfig.
+
+// ---- When building with the Android tool-chain ----
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_declaration as ProtoFlagDeclaration;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_declarations as ProtoFlagDeclarations;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_value as ProtoFlagValue;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_values as ProtoFlagValues;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_permission as ProtoFlagPermission;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Flag_state as ProtoFlagState;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Parsed_flags as ProtoParsedFlags;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Parsed_flag as ProtoParsedFlag;
+
+#[cfg(not(feature = "cargo"))]
+pub use aconfig_protos::aconfig::Tracepoint as ProtoTracepoint;
+
+// ---- When building with cargo ----
+#[cfg(feature = "cargo")]
+include!(concat!(env!("OUT_DIR"), "/aconfig_proto/mod.rs"));
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_declaration as ProtoFlagDeclaration;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_declarations as ProtoFlagDeclarations;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_value as ProtoFlagValue;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_values as ProtoFlagValues;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_permission as ProtoFlagPermission;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Flag_state as ProtoFlagState;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Parsed_flags as ProtoParsedFlags;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Parsed_flag as ProtoParsedFlag;
+
+#[cfg(feature = "cargo")]
+pub use aconfig::Tracepoint as ProtoTracepoint;
+
+// ---- Common for both the Android tool-chain and cargo ----
+use anyhow::Result;
+
+pub fn try_from_text_proto<T>(s: &str) -> Result<T>
+where
+ T: protobuf::MessageFull,
+{
+ // warning: parse_from_str does not check if required fields are set
+ protobuf::text_format::parse_from_str(s).map_err(|e| e.into())
+}
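+
+// Because of the above, callers must enforce field presence themselves; the
+// TryFrom impls in aconfig.rs, for example, treat every field as an Option
+// and bail with a "missing '<field>' field" error.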
diff --git a/tools/aconfig/templates/cpp.template b/tools/aconfig/templates/cpp.template
new file mode 100644
index 0000000..ae8b59f
--- /dev/null
+++ b/tools/aconfig/templates/cpp.template
@@ -0,0 +1,25 @@
+#ifndef {namespace}_HEADER_H
+#define {namespace}_HEADER_H
+#include "{namespace}.h"
+{{ if readwrite }}
+#include <server_configurable_flags/get_flags.h>
+using namespace server_configurable_flags;
+{{ endif }}
+namespace {namespace} \{
+ {{ for item in class_elements}}
+ class {item.flag_name} \{
+ public:
+ virtual const bool value() \{
+ {{ if item.readwrite- }}
+ return GetServerConfigurableFlag(
+ "{namespace}",
+ "{item.flag_name}",
+ "{item.default_value}") == "true";
+ {{ -else- }}
+ return {item.default_value};
+ {{ -endif }}
+ }
+ };
+ {{ endfor }}
+}
+#endif
diff --git a/tools/aconfig/templates/java.template b/tools/aconfig/templates/java.template
new file mode 100644
index 0000000..89da18b
--- /dev/null
+++ b/tools/aconfig/templates/java.template
@@ -0,0 +1,19 @@
+package {namespace};
+{{ if readwrite }}
+import android.provider.DeviceConfig;
+{{ endif }}
+public final class Flags \{
+ {{ for item in class_elements}}
+ public static boolean {item.method_name}() \{
+ {{ if item.readwrite- }}
+ return DeviceConfig.getBoolean(
+ "{namespace}",
+ "{item.feature_name}__{item.flag_name}",
+ {item.default_value}
+ );
+ {{ -else- }}
+ return {item.default_value};
+ {{ -endif }}
+ }
+ {{ endfor }}
+}
diff --git a/tools/compliance/Android.bp b/tools/compliance/Android.bp
index f85a46f..ef5c760 100644
--- a/tools/compliance/Android.bp
+++ b/tools/compliance/Android.bp
@@ -142,6 +142,7 @@
"spdx-tools-builder2v2",
"spdx-tools-spdxcommon",
"spdx-tools-spdx-json",
+ "spdx-tools-spdxlib",
],
testSrcs: ["cmd/sbom/sbom_test.go"],
}
diff --git a/tools/compliance/cmd/sbom/sbom.go b/tools/compliance/cmd/sbom/sbom.go
index 3cdfa0a..f61289e 100644
--- a/tools/compliance/cmd/sbom/sbom.go
+++ b/tools/compliance/cmd/sbom/sbom.go
@@ -38,6 +38,7 @@
"github.com/spdx/tools-golang/json"
"github.com/spdx/tools-golang/spdx/common"
spdx "github.com/spdx/tools-golang/spdx/v2_2"
+ "github.com/spdx/tools-golang/spdxlib"
)
var (
@@ -54,6 +55,7 @@
product string
stripPrefix []string
creationTime creationTimeGetter
+ buildid string
}
func (ctx context) strip(installPath string) string {
@@ -123,6 +125,7 @@
depsFile := flags.String("d", "", "Where to write the deps file")
product := flags.String("product", "", "The name of the product for which the notice is generated.")
stripPrefix := newMultiString(flags, "strip_prefix", "Prefix to remove from paths. i.e. path to root (multiple allowed)")
+ buildid := flags.String("build_id", "", "Uniquely identifies the build. (default timestamp)")
flags.Parse(expandedArgs)
@@ -161,7 +164,7 @@
ofile = obuf
}
- ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, actualTime}
+ ctx := &context{ofile, os.Stderr, compliance.FS, *product, *stripPrefix, actualTime, *buildid}
spdxDoc, deps, err := sbomGenerator(ctx, flags.Args()...)
@@ -173,6 +176,7 @@
os.Exit(1)
}
+ // write the generated SPDX document
if err := spdx_json.Save2_2(spdxDoc, ofile); err != nil {
fmt.Fprintf(os.Stderr, "failed to write document to %v: %v", *outputFile, err)
os.Exit(1)
@@ -315,14 +319,21 @@
}
// generateSPDXNamespace generates a unique SPDX Document Namespace using a SHA1 checksum
-// and the CreationInfo.Created field as the date.
-func generateSPDXNamespace(created string) string {
- // Compute a SHA1 checksum of the CreationInfo.Created field.
- hash := sha1.Sum([]byte(created))
- checksum := hex.EncodeToString(hash[:])
- // Combine the checksum and timestamp to generate the SPDX Namespace.
- namespace := fmt.Sprintf("SPDXRef-DOCUMENT-%s-%s", created, checksum)
+// of the input file names plus the build id, or the CreationInfo.Created
+// timestamp when no build id is given.
+func generateSPDXNamespace(buildid string, created string, files ...string) string {
+ seed := strings.Join(files, "")
+
+ if buildid == "" {
+ seed += created
+ } else {
+ seed += buildid
+ }
+
+ // Compute a SHA1 checksum of the seed.
+ hash := sha1.Sum([]byte(seed))
+ uuid := hex.EncodeToString(hash[:])
+
+ namespace := fmt.Sprintf("SPDXRef-DOCUMENT-%s", uuid)
return namespace
}
@@ -516,15 +527,21 @@
ci.Created = ctx.creationTime()
- return &spdx.Document{
+ doc := &spdx.Document{
SPDXVersion: "SPDX-2.2",
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: docName,
- DocumentNamespace: generateSPDXNamespace(ci.Created),
+ DocumentNamespace: generateSPDXNamespace(ctx.buildid, ci.Created, files...),
CreationInfo: ci,
Packages: pkgs,
Relationships: relationships,
OtherLicenses: otherLicenses,
- }, deps, nil
+ }
+
+ if err := spdxlib.ValidateDocument2_2(doc); err != nil {
+ return nil, nil, fmt.Errorf("Unable to validate the SPDX doc: %v\n", err)
+ }
+
+ return doc, deps, nil
}
diff --git a/tools/compliance/cmd/sbom/sbom_test.go b/tools/compliance/cmd/sbom/sbom_test.go
index 65a2df1..8a62713 100644
--- a/tools/compliance/cmd/sbom/sbom_test.go
+++ b/tools/compliance/cmd/sbom/sbom_test.go
@@ -59,7 +59,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-firstparty-highest.apex",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/highest.apex.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -187,7 +187,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-firstparty-application",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/application.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -266,7 +266,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-firstparty-container.zip",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/container.zip.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -394,7 +394,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-firstparty-bin-bin1",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/bin/bin1.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -460,7 +460,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-firstparty-lib-libd.so",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/firstparty/lib/libd.so.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -500,7 +500,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-notice-highest.apex",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/highest.apex.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -634,7 +634,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-notice-container.zip",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/container.zip.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -768,7 +768,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-notice-application",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/application.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -853,7 +853,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-notice-bin-bin1",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/bin/bin1.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -925,7 +925,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-notice-lib-libd.so",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/notice/lib/libd.so.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -965,7 +965,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-reciprocal-highest.apex",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/highest.apex.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -1105,7 +1105,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-reciprocal-application",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/application.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -1196,7 +1196,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-reciprocal-bin-bin1",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/bin/bin1.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -1268,7 +1268,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-reciprocal-lib-libd.so",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/reciprocal/lib/libd.so.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -1308,7 +1308,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-restricted-highest.apex",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/highest.apex.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -1454,7 +1454,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-restricted-container.zip",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/container.zip.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -1600,7 +1600,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-restricted-bin-bin1",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/bin/bin1.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -1678,7 +1678,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-restricted-lib-libd.so",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/restricted/lib/libd.so.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -1718,7 +1718,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-proprietary-highest.apex",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/highest.apex.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -1864,7 +1864,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-proprietary-container.zip",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/container.zip.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -2010,7 +2010,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-proprietary-application",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/application.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -2101,7 +2101,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-proprietary-bin-bin1",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/bin/bin1.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -2173,7 +2173,7 @@
DataLicense: "CC0-1.0",
SPDXIdentifier: "DOCUMENT",
DocumentName: "testdata-proprietary-lib-libd.so",
- DocumentNamespace: generateSPDXNamespace("1970-01-01T00:00:00Z"),
+ DocumentNamespace: generateSPDXNamespace("", "1970-01-01T00:00:00Z", "testdata/proprietary/lib/libd.so.meta_lic"),
CreationInfo: getCreationInfo(t),
Packages: []*spdx.Package{
{
@@ -2215,7 +2215,7 @@
rootFiles = append(rootFiles, "testdata/"+tt.condition+"/"+r)
}
- ctx := context{stdout, stderr, compliance.GetFS(tt.outDir), "", []string{tt.stripPrefix}, fakeTime}
+ ctx := context{stdout, stderr, compliance.GetFS(tt.outDir), "", []string{tt.stripPrefix}, fakeTime, ""}
spdxDoc, deps, err := sbomGenerator(&ctx, rootFiles...)
if err != nil {
@@ -2226,6 +2226,10 @@
t.Errorf("sbom: gotStderr = %v, want none", stderr)
}
+ if err := validate(spdxDoc); err != nil {
+ t.Fatalf("sbom: document fails to validate: %v", err)
+ }
+
gotData, err := json.Marshal(spdxDoc)
if err != nil {
t.Fatalf("sbom: failed to marshal spdx doc: %v", err)
@@ -2258,6 +2262,96 @@
}
}
+func TestGenerateSPDXNamespace(t *testing.T) {
+
+ buildID1 := "example-1"
+ buildID2 := "example-2"
+ files1 := "file1"
+ timestamp1 := "2022-05-01"
+ timestamp2 := "2022-05-02"
+ files2 := "file2"
+
+ // Test case 1: different timestamps, same files
+ nsh1 := generateSPDXNamespace("", timestamp1, files1)
+ nsh2 := generateSPDXNamespace("", timestamp2, files1)
+
+ if nsh1 == "" {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp1, files1)
+ }
+
+ if nsh2 == "" {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp2, files1)
+ }
+
+ if nsh1 == nsh2 {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", "", timestamp1, files1, "", timestamp2, files1)
+ }
+
+ // Test case 2: different build ids, same timestamps and files
+ nsh1 = generateSPDXNamespace(buildID1, timestamp1, files1)
+ nsh2 = generateSPDXNamespace(buildID2, timestamp1, files1)
+
+ if nsh1 == "" {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files1)
+ }
+
+ if nsh2 == "" {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID2, timestamp1, files1)
+ }
+
+ if nsh1 == nsh2 {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", buildID1, timestamp1, files1, buildID2, timestamp1, files1)
+ }
+
+ // Test case 3: same build ids and files, different timestamps
+ nsh1 = generateSPDXNamespace(buildID1, timestamp1, files1)
+ nsh2 = generateSPDXNamespace(buildID1, timestamp2, files1)
+
+ if nsh1 == "" {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files1)
+ }
+
+ if nsh2 == "" {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp2, files1)
+ }
+
+ if nsh1 != nsh2 {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected same namespace hashes, but got different: %s and %s", buildID1, timestamp1, files1, buildID2, timestamp1, files1, nsh1, nsh2)
+ }
+
+ // Test case 4: same build ids and timestamps, different files
+ nsh1 = generateSPDXNamespace(buildID1, timestamp1, files1)
+ nsh2 = generateSPDXNamespace(buildID1, timestamp1, files2)
+
+ if nsh1 == "" {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files1)
+ }
+
+ if nsh2 == "" {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", buildID1, timestamp1, files2)
+ }
+
+ if nsh1 == nsh2 {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", buildID1, timestamp1, files1, buildID1, timestamp1, files2)
+ }
+
+ // Test case 5: empty build ids, same timestamps and different files
+ nsh1 = generateSPDXNamespace("", timestamp1, files1)
+ nsh2 = generateSPDXNamespace("", timestamp1, files2)
+
+ if nsh1 == "" {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp1, files1)
+ }
+
+ if nsh2 == "" {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s): expected non-empty string, but got empty string", "", timestamp1, files2)
+ }
+
+ if nsh1 == nsh2 {
+ t.Errorf("generateSPDXNamespace(%s, %s, %s) and generateSPDXNamespace(%s, %s, %s): expected different namespace hashes, but got the same", "", timestamp1, files1, "", timestamp1, files2)
+ }
+}
+
func getCreationInfo(t *testing.T) *spdx.CreationInfo {
ci, err := builder2v2.BuildCreationInfoSection2_2("Organization", "Google LLC", nil)
if err != nil {
@@ -2267,6 +2361,36 @@
return ci
}
+// validate returns an error if the Document is found to be invalid
+func validate(doc *spdx.Document) error {
+ if doc.SPDXVersion == "" {
+ return fmt.Errorf("SPDXVersion: got nothing, want spdx version")
+ }
+ if doc.DataLicense == "" {
+ return fmt.Errorf("DataLicense: got nothing, want Data License")
+ }
+ if doc.SPDXIdentifier == "" {
+ return fmt.Errorf("SPDXIdentifier: got nothing, want SPDX Identifier")
+ }
+ if doc.DocumentName == "" {
+ return fmt.Errorf("DocumentName: got nothing, want Document Name")
+ }
+ if fmt.Sprintf("%v", doc.CreationInfo.Creators[1].Creator) != "Google LLC" {
+ return fmt.Errorf("Creator: got %v, want 'Google LLC'")
+ }
+ _, err := time.Parse(time.RFC3339, doc.CreationInfo.Created)
+ if err != nil {
+ return fmt.Errorf("Invalid time spec: %q: got error %q, want no error", doc.CreationInfo.Created, err)
+ }
+
+ for _, license := range doc.OtherLicenses {
+ if license.ExtractedText == "" {
+ return fmt.Errorf("License file: %q: got nothing, want license text", license.LicenseName)
+ }
+ }
+ return nil
+}
+
// compareSpdxDocs deep-compares two spdx docs by going through the info section, packages, relationships and licenses
func compareSpdxDocs(t *testing.T, actual, expected *spdx.Document) {
diff --git a/tools/finalization/build-step-1.sh b/tools/finalization/build-step-1.sh
index 3c618fe..3d5eadb 100755
--- a/tools/finalization/build-step-1.sh
+++ b/tools/finalization/build-step-1.sh
@@ -8,8 +8,7 @@
if [ "$FINAL_STATE" = "unfinalized" ] ; then
# Build finalization artifacts.
- # source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
- echo "Build finalization artifacts."
+ source $top/build/make/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
fi;
}
diff --git a/tools/finalization/environment.sh b/tools/finalization/environment.sh
index 8c838aa..9714ac4 100755
--- a/tools/finalization/environment.sh
+++ b/tools/finalization/environment.sh
@@ -2,17 +2,20 @@
set -ex
-export FINAL_BUG_ID='0'
+export FINAL_BUG_ID='0' # CI only
-export FINAL_PLATFORM_CODENAME='UpsideDownCake'
-export CURRENT_PLATFORM_CODENAME='UpsideDownCake'
-export FINAL_PLATFORM_CODENAME_JAVA='UPSIDE_DOWN_CAKE'
-export FINAL_PLATFORM_SDK_VERSION='34'
-export FINAL_PLATFORM_VERSION='14'
+export FINAL_PLATFORM_CODENAME='VanillaIceCream'
+export CURRENT_PLATFORM_CODENAME='VanillaIceCream'
+export FINAL_PLATFORM_CODENAME_JAVA='VANILLA_ICE_CREAM'
+export FINAL_PLATFORM_VERSION='15'
-export FINAL_BUILD_PREFIX='UP1A'
-
-export FINAL_MAINLINE_EXTENSION='7'
+# Set arbitrary large values for CI.
+# SDK_VERSION needs to be <61 (lint/libs/lint-api/src/main/java/com/android/tools/lint/detector/api/ApiConstraint.kt)
+# There are multiple places where we rely on next SDK version to be previous + 1, e.g. RESOURCES_SDK_INT.
+# We might or might not fix this in future, but for now let's keep it +1.
+export FINAL_PLATFORM_SDK_VERSION='35'
+# Feel free to randomize once in a while to detect buggy version detection code.
+export FINAL_MAINLINE_EXTENSION='58'
# Options:
# 'unfinalized' - branch is in development state,
diff --git a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
index cdc2e3a..fa33986 100755
--- a/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
+++ b/tools/finalization/finalize-aidl-vndk-sdk-resources.sh
@@ -4,7 +4,15 @@
function apply_droidstubs_hack() {
if ! grep -q 'STOPSHIP: RESTORE THIS LOGIC WHEN DECLARING "REL" BUILD' "$top/build/soong/java/droidstubs.go" ; then
- git -C "$top/build/soong" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
+ local build_soong_git_root="$(readlink -f $top/build/soong)"
+ git -C "$build_soong_git_root" apply --allow-empty ../../build/make/tools/finalization/build_soong_java_droidstubs.go.apply_hack.diff
+ fi
+}
+
+function apply_resources_sdk_int_fix() {
+ if ! grep -q 'public static final int RESOURCES_SDK_INT = SDK_INT;' "$top/frameworks/base/core/java/android/os/Build.java" ; then
+ local base_git_root="$(readlink -f $top/frameworks/base)"
+ git -C "$base_git_root" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.apply_resource_sdk_int.diff
fi
}
@@ -86,9 +94,7 @@
AIDL_TRANSITIVE_FREEZE=true $m aidl-freeze-api create_reference_dumps
# Generate ABI dumps
- ANDROID_BUILD_TOP="$top" \
- out/host/linux-x86/bin/create_reference_dumps \
- -p aosp_arm64 --build-variant user
+ ANDROID_BUILD_TOP="$top" out/host/linux-x86/bin/create_reference_dumps
echo "NOTE: THIS INTENTIONALLY MAY FAIL AND REPAIR ITSELF (until 'DONE')"
# Update new versions of files. See update-vndk-list.sh (which requires envsetup.sh)
@@ -101,6 +107,12 @@
# frameworks/libs/modules-utils
finalize_modules_utils
+ # development/sdk
+ local platform_source="$top/development/sdk/platform_source.prop_template"
+ sed -i -e 's/Pkg\.Revision.*/Pkg\.Revision=1/g' $platform_source
+ local build_tools_source="$top/development/sdk/build_tools_source.prop_template"
+ sed -i -e 's/Pkg\.Revision.*/Pkg\.Revision=${PLATFORM_SDK_VERSION}.0.0/g' $build_tools_source
+
# build/make
local version_defaults="$top/build/make/core/version_defaults.mk"
sed -i -e "s/PLATFORM_SDK_VERSION := .*/PLATFORM_SDK_VERSION := ${FINAL_PLATFORM_SDK_VERSION}/g" $version_defaults
@@ -108,8 +120,11 @@
sed -i -e "s/sepolicy_major_vers := .*/sepolicy_major_vers := ${FINAL_PLATFORM_SDK_VERSION}/g" "$top/build/make/core/config.mk"
cp "$top/build/make/target/product/gsi/current.txt" "$top/build/make/target/product/gsi/$FINAL_PLATFORM_SDK_VERSION.txt"
- # build/soong
- sed -i -e "/:.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\\t\t\t\"${FINAL_PLATFORM_CODENAME}\": ${FINAL_PLATFORM_SDK_VERSION}," "$top/build/soong/android/api_levels.go"
+ # build/bazel
+ local codename_version="\"${FINAL_PLATFORM_CODENAME}\": ${FINAL_PLATFORM_SDK_VERSION}"
+ if ! grep -q "$codename_version" "$top/build/bazel/rules/common/api_constants.bzl" ; then
+ sed -i -e "/:.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\ $codename_version," "$top/build/bazel/rules/common/api_constants.bzl"
+ fi
# cts
echo ${FINAL_PLATFORM_VERSION} > "$top/cts/tests/tests/os/assets/platform_releases.txt"
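
The build/bazel step above is guarded the same way: the new `"VanillaIceCream": 35` entry is appended after the line holding the previous SDK version, but only if it is not already in api_constants.bzl. A hedged Python equivalent of that grep-then-sed sequence (the helper and regex are illustrative):

    import re

    def add_codename(path, codename, sdk):
        entry = '"%s": %d' % (codename, sdk)
        with open(path) as f:
            text = f.read()
        if entry in text:
            return  # already finalized; keep the step idempotent
        # Append the new mapping after the first line mentioning sdk - 1,
        # mirroring sed's "/addr/a" append command.
        text = re.sub(r"(?m)^(.*:.*%d,.*)$" % (sdk - 1),
                      lambda m: m.group(1) + "\n    %s," % entry,
                      text, count=1)
        with open(path, "w") as f:
            f.write(text)
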
@@ -129,13 +144,17 @@
# frameworks/base
sed -i "s%$SDK_CODENAME%$SDK_VERSION%g" "$top/frameworks/base/core/java/android/os/Build.java"
+ apply_resources_sdk_int_fix
sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\ SDK_${FINAL_PLATFORM_CODENAME_JAVA} = ${FINAL_PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt/SdkConstants.h"
sed -i -e "/=.*$((${FINAL_PLATFORM_SDK_VERSION}-1)),/a \\ SDK_${FINAL_PLATFORM_CODENAME_JAVA} = ${FINAL_PLATFORM_SDK_VERSION}," "$top/frameworks/base/tools/aapt2/SdkConstants.h"
# Bump Mainline SDK extension version.
- set +e
+ local SDKEXT="packages/modules/SdkExtensions/"
"$top/packages/modules/SdkExtensions/gen_sdk/bump_sdk.sh" ${FINAL_MAINLINE_EXTENSION}
- set -e
+ # Undo the last commit but keep its changes as modified files;
+ # the code that creates the finalization topic will pick them up later.
+ git -C ${SDKEXT} reset HEAD~1
+
local version_defaults="$top/build/make/core/version_defaults.mk"
sed -i -e "s/PLATFORM_SDK_EXTENSION_VERSION := .*/PLATFORM_SDK_EXTENSION_VERSION := ${FINAL_MAINLINE_EXTENSION}/g" $version_defaults
diff --git a/tools/finalization/finalize-sdk-rel.sh b/tools/finalization/finalize-sdk-rel.sh
index cbee005..62e5ee5 100755
--- a/tools/finalization/finalize-sdk-rel.sh
+++ b/tools/finalization/finalize-sdk-rel.sh
@@ -8,6 +8,12 @@
fi
}
+function revert_resources_sdk_int_fix() {
+ if grep -q 'public static final int RESOURCES_SDK_INT = SDK_INT;' "$top/frameworks/base/core/java/android/os/Build.java" ; then
+ git -C "$top/frameworks/base" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.revert_resource_sdk_int.diff
+ fi
+}
+
function apply_prerelease_sdk_hack() {
if ! grep -q 'STOPSHIP: hack for the pre-release SDK' "$top/frameworks/base/core/java/android/content/pm/parsing/FrameworkParsingPackageUtils.java" ; then
git -C "$top/frameworks/base" apply --allow-empty ../../build/make/tools/finalization/frameworks_base.apply_hack.diff
@@ -24,8 +30,12 @@
# let the apps built with pre-release SDK parse
apply_prerelease_sdk_hack
+ # In REL mode, RESOURCES_SDK_INT is already set correctly, so the fix is no longer required.
+ revert_resources_sdk_int_fix
+
# build/make/core/version_defaults.mk
- sed -i -e "s/PLATFORM_VERSION_CODENAME.${FINAL_BUILD_PREFIX} := .*/PLATFORM_VERSION_CODENAME.${FINAL_BUILD_PREFIX} := REL/g" "$top/build/make/core/version_defaults.mk"
+ # Mark all versions "released".
+ sed -i 's/\(PLATFORM_VERSION_CODENAME\.[^[:space:]]*\) := [^[:space:]]*/\1 := REL/g' "$top/build/make/core/version_defaults.mk"
# cts
echo "$FINAL_PLATFORM_VERSION" > "$top/cts/tests/tests/os/assets/platform_versions.txt"
@@ -43,12 +53,14 @@
mkdir -p "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION"
cp -r "$top/prebuilts/abi-dumps/ndk/current/64/" "$top/prebuilts/abi-dumps/ndk/$FINAL_PLATFORM_SDK_VERSION/"
- # prebuilts/abi-dumps/vndk
- mv "$top/prebuilts/abi-dumps/vndk/$CURRENT_PLATFORM_CODENAME" "$top/prebuilts/abi-dumps/vndk/$FINAL_PLATFORM_SDK_VERSION"
-
# prebuilts/abi-dumps/platform
mkdir -p "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION"
cp -r "$top/prebuilts/abi-dumps/platform/current/64/" "$top/prebuilts/abi-dumps/platform/$FINAL_PLATFORM_SDK_VERSION/"
+
+ if [ "$FINAL_STATE" != "sdk" ] || [ "$FINAL_PLATFORM_CODENAME" == "$CURRENT_PLATFORM_CODENAME" ] ; then
+ # prebuilts/abi-dumps/vndk
+ mv "$top/prebuilts/abi-dumps/vndk/$CURRENT_PLATFORM_CODENAME" "$top/prebuilts/abi-dumps/vndk/$FINAL_PLATFORM_SDK_VERSION"
+ fi
}
finalize_sdk_rel
diff --git a/tools/finalization/frameworks_base.apply_resource_sdk_int.diff b/tools/finalization/frameworks_base.apply_resource_sdk_int.diff
new file mode 100644
index 0000000..f0576d0
--- /dev/null
+++ b/tools/finalization/frameworks_base.apply_resource_sdk_int.diff
@@ -0,0 +1,24 @@
+From cdb47fc90b8d6860ec1dc5efada1f9ccd471618b Mon Sep 17 00:00:00 2001
+From: Alex Buynytskyy <alexbuy@google.com>
+Date: Tue, 11 Apr 2023 22:12:44 +0000
+Subject: [PATCH] Don't force +1 for resource resolution.
+
+Bug: 277674088
+Fixes: 277674088
+Test: boots, no crashes
+Change-Id: I17e743a0f1cf6f98fddd40c358dea5a8b9cc7723
+---
+
+diff --git a/core/java/android/os/Build.java b/core/java/android/os/Build.java
+index eb47170..4d3e92b 100755
+--- a/core/java/android/os/Build.java
++++ b/core/java/android/os/Build.java
+@@ -493,7 +493,7 @@
+ * @hide
+ */
+ @TestApi
+- public static final int RESOURCES_SDK_INT = SDK_INT + ACTIVE_CODENAMES.length;
++ public static final int RESOURCES_SDK_INT = SDK_INT;
+
+ /**
+ * The current lowest supported value of app target SDK. Applications targeting
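
The two new patch files toggle a single constant: during development RESOURCES_SDK_INT runs ahead of SDK_INT by one per active codename, the finalization fix pins it to SDK_INT, and the revert restores the original expression once the branch is REL. A small sketch of the relationship (values match this finalization; the function itself is illustrative):

    def resources_sdk_int(sdk_int, active_codenames, finalized):
        # Before finalization, resources resolve against SDK_INT plus one
        # slot per active codename; afterwards the codename is gone and
        # the two values coincide.
        return sdk_int if finalized else sdk_int + len(active_codenames)

    assert resources_sdk_int(34, ["VanillaIceCream"], False) == 35
    assert resources_sdk_int(35, [], True) == 35
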
diff --git a/tools/finalization/frameworks_base.revert_resource_sdk_int.diff b/tools/finalization/frameworks_base.revert_resource_sdk_int.diff
new file mode 100644
index 0000000..2ade499
--- /dev/null
+++ b/tools/finalization/frameworks_base.revert_resource_sdk_int.diff
@@ -0,0 +1,27 @@
+From c7e460bb19071d867cd7ca04282ce42694f4f358 Mon Sep 17 00:00:00 2001
+From: Alex Buynytskyy <alexbuy@google.com>
+Date: Wed, 12 Apr 2023 01:06:26 +0000
+Subject: [PATCH] Revert "Don't force +1 for resource resolution."
+
+It's not required for master.
+
+This reverts commit f1cb683988f81579a76ddbf9993848a4a06dd28c.
+
+Bug: 277674088
+Test: boots, no crashes
+Change-Id: Ia1692548f26496fdc6f1e4f0557213c7996d6823
+---
+
+diff --git a/core/java/android/os/Build.java b/core/java/android/os/Build.java
+index 4d3e92b..eb47170 100755
+--- a/core/java/android/os/Build.java
++++ b/core/java/android/os/Build.java
+@@ -493,7 +493,7 @@
+ * @hide
+ */
+ @TestApi
+- public static final int RESOURCES_SDK_INT = SDK_INT;
++ public static final int RESOURCES_SDK_INT = SDK_INT + ACTIVE_CODENAMES.length;
+
+ /**
+ * The current lowest supported value of app target SDK. Applications targeting
diff --git a/tools/finalization/localonly-steps.sh b/tools/finalization/localonly-steps.sh
index 6107b3e..7318ca1 100755
--- a/tools/finalization/localonly-steps.sh
+++ b/tools/finalization/localonly-steps.sh
@@ -17,7 +17,7 @@
$top/build/soong/soong_ui.bash --make-mode TARGET_PRODUCT=sdk TARGET_BUILD_VARIANT=userdebug sdk dist sdk_repo DIST_DIR=out/dist
# Build Modules SDKs.
- TARGET_BUILD_VARIANT=userdebug UNBUNDLED_BUILD_SDKS_FROM_SOURCE=true DIST_DIR=out/dist "$top/vendor/google/build/mainline_modules_sdks.sh"
+ TARGET_BUILD_VARIANT=userdebug UNBUNDLED_BUILD_SDKS_FROM_SOURCE=true DIST_DIR=out/dist "$top/vendor/google/build/mainline_modules_sdks.sh" --build-release=latest
# Update prebuilts.
"$top/prebuilts/build-tools/path/linux-x86/python3" -W ignore::DeprecationWarning "$top/prebuilts/sdk/update_prebuilts.py" --local_mode -f ${FINAL_PLATFORM_SDK_VERSION} -e ${FINAL_MAINLINE_EXTENSION} --bug 1 1
diff --git a/tools/generate-sbom.py b/tools/generate-sbom.py
deleted file mode 100755
index 9583395..0000000
--- a/tools/generate-sbom.py
+++ /dev/null
@@ -1,690 +0,0 @@
-#!/usr/bin/env python3
-#
-# Copyright (C) 2023 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-Generate the SBOM of the current target product in SPDX format.
-Usage example:
- generate-sbom.py --output_file out/target/product/vsoc_x86_64/sbom.spdx \
- --metadata out/target/product/vsoc_x86_64/sbom-metadata.csv \
- --product_out_dir=out/target/product/vsoc_x86_64 \
- --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
- --product_mfr=Google
-"""
-
-import argparse
-import csv
-import datetime
-import google.protobuf.text_format as text_format
-import hashlib
-import json
-import os
-import metadata_file_pb2
-
-# Common
-SPDXID = 'SPDXID'
-SPDX_VERSION = 'SPDXVersion'
-DATA_LICENSE = 'DataLicense'
-DOCUMENT_NAME = 'DocumentName'
-DOCUMENT_NAMESPACE = 'DocumentNamespace'
-CREATED = 'Created'
-CREATOR = 'Creator'
-EXTERNAL_DOCUMENT_REF = 'ExternalDocumentRef'
-
-# Package
-PACKAGE_NAME = 'PackageName'
-PACKAGE_DOWNLOAD_LOCATION = 'PackageDownloadLocation'
-PACKAGE_VERSION = 'PackageVersion'
-PACKAGE_SUPPLIER = 'PackageSupplier'
-FILES_ANALYZED = 'FilesAnalyzed'
-PACKAGE_VERIFICATION_CODE = 'PackageVerificationCode'
-PACKAGE_EXTERNAL_REF = 'ExternalRef'
-# Package license
-PACKAGE_LICENSE_CONCLUDED = 'PackageLicenseConcluded'
-PACKAGE_LICENSE_INFO_FROM_FILES = 'PackageLicenseInfoFromFiles'
-PACKAGE_LICENSE_DECLARED = 'PackageLicenseDeclared'
-PACKAGE_LICENSE_COMMENTS = 'PackageLicenseComments'
-
-# File
-FILE_NAME = 'FileName'
-FILE_CHECKSUM = 'FileChecksum'
-# File license
-FILE_LICENSE_CONCLUDED = 'LicenseConcluded'
-FILE_LICENSE_INFO_IN_FILE = 'LicenseInfoInFile'
-FILE_LICENSE_COMMENTS = 'LicenseComments'
-FILE_COPYRIGHT_TEXT = 'FileCopyrightText'
-FILE_NOTICE = 'FileNotice'
-FILE_ATTRIBUTION_TEXT = 'FileAttributionText'
-
-# Relationship
-RELATIONSHIP = 'Relationship'
-REL_DESCRIBES = 'DESCRIBES'
-REL_VARIANT_OF = 'VARIANT_OF'
-REL_GENERATED_FROM = 'GENERATED_FROM'
-
-# Package type
-PKG_SOURCE = 'SOURCE'
-PKG_UPSTREAM = 'UPSTREAM'
-PKG_PREBUILT = 'PREBUILT'
-
-# Security tag
-NVD_CPE23 = 'NVD-CPE2.3:'
-
-# Report
-ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
-ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
-ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
-ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
-ISSUE_INSTALLED_FILE_NOT_EXIST = 'Non-exist installed files:'
-INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
-
-
-def get_args():
- parser = argparse.ArgumentParser()
- parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
- parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
- parser.add_argument('--metadata', required=True, help='The SBOM metadata file path.')
- parser.add_argument('--product_out_dir', required=True, help='The parent directory of all the installed files.')
- parser.add_argument('--build_version', required=True, help='The build version.')
- parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
- parser.add_argument('--json', action='store_true', default=False, help='Generated SBOM file in SPDX JSON format')
- parser.add_argument('--unbundled', action='store_true', default=False, help='Generate SBOM file for unbundled module')
-
- return parser.parse_args()
-
-
-def log(*info):
- if args.verbose:
- for i in info:
- print(i)
-
-
-def new_doc_header(doc_id):
- return {
- SPDX_VERSION: 'SPDX-2.3',
- DATA_LICENSE: 'CC0-1.0',
- SPDXID: doc_id,
- DOCUMENT_NAME: args.build_version,
- DOCUMENT_NAMESPACE: f'https://www.google.com/sbom/spdx/android/{args.build_version}',
- CREATOR: 'Organization: Google, LLC',
- CREATED: '<timestamp>',
- EXTERNAL_DOCUMENT_REF: [],
- }
-
-
-def new_package_record(id, name, version, supplier, download_location=None, files_analyzed='false', external_refs=[]):
- package = {
- PACKAGE_NAME: name,
- SPDXID: id,
- PACKAGE_DOWNLOAD_LOCATION: download_location if download_location else 'NONE',
- FILES_ANALYZED: files_analyzed,
- }
- if version:
- package[PACKAGE_VERSION] = version
- if supplier:
- package[PACKAGE_SUPPLIER] = f'Organization: {supplier}'
- if external_refs:
- package[PACKAGE_EXTERNAL_REF] = external_refs
-
- return package
-
-
-def new_file_record(id, name, checksum):
- return {
- FILE_NAME: name,
- SPDXID: id,
- FILE_CHECKSUM: checksum
- }
-
-
-def encode_for_spdxid(s):
- """Simple encode for string values used in SPDXID which uses the charset of A-Za-Z0-9.-"""
- result = ''
- for c in s:
- if c.isalnum() or c in '.-':
- result += c
- elif c in '_@/':
- result += '-'
- else:
- result += '0x' + c.encode('utf-8').hex()
-
- return result.lstrip('-')
-
-
-def new_package_id(package_name, type):
- return f'SPDXRef-{type}-{encode_for_spdxid(package_name)}'
-
-
-def new_external_doc_ref(package_name, sbom_url, sbom_checksum):
- doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{encode_for_spdxid(package_name)}'
- return f'{EXTERNAL_DOCUMENT_REF}: {doc_ref_id} {sbom_url} {sbom_checksum}', doc_ref_id
-
-
-def new_file_id(file_path):
- return f'SPDXRef-{encode_for_spdxid(file_path)}'
-
-
-def new_relationship_record(id1, relationship, id2):
- return f'{RELATIONSHIP}: {id1} {relationship} {id2}'
-
-
-def checksum(file_path):
- file_path = args.product_out_dir + '/' + file_path
- h = hashlib.sha1()
- if os.path.islink(file_path):
- h.update(os.readlink(file_path).encode('utf-8'))
- else:
- with open(file_path, 'rb') as f:
- h.update(f.read())
- return f'SHA1: {h.hexdigest()}'
-
-
-def is_soong_prebuilt_module(file_metadata):
- return file_metadata['soong_module_type'] and file_metadata['soong_module_type'] in [
- 'android_app_import', 'android_library_import', 'cc_prebuilt_binary', 'cc_prebuilt_library',
- 'cc_prebuilt_library_headers', 'cc_prebuilt_library_shared', 'cc_prebuilt_library_static', 'cc_prebuilt_object',
- 'dex_import', 'java_import', 'java_sdk_library_import', 'java_system_modules_import',
- 'libclang_rt_prebuilt_library_static', 'libclang_rt_prebuilt_library_shared', 'llvm_prebuilt_library_static',
- 'ndk_prebuilt_object', 'ndk_prebuilt_shared_stl', 'nkd_prebuilt_static_stl', 'prebuilt_apex',
- 'prebuilt_bootclasspath_fragment', 'prebuilt_dsp', 'prebuilt_firmware', 'prebuilt_kernel_modules',
- 'prebuilt_rfsa', 'prebuilt_root', 'rust_prebuilt_dylib', 'rust_prebuilt_library', 'rust_prebuilt_rlib',
- 'vndk_prebuilt_shared',
-
- # 'android_test_import',
- # 'cc_prebuilt_test_library_shared',
- # 'java_import_host',
- # 'java_test_import',
- # 'llvm_host_prebuilt_library_shared',
- # 'prebuilt_apis',
- # 'prebuilt_build_tool',
- # 'prebuilt_defaults',
- # 'prebuilt_etc',
- # 'prebuilt_etc_host',
- # 'prebuilt_etc_xml',
- # 'prebuilt_font',
- # 'prebuilt_hidl_interfaces',
- # 'prebuilt_platform_compat_config',
- # 'prebuilt_stubs_sources',
- # 'prebuilt_usr_share',
- # 'prebuilt_usr_share_host',
- # 'soong_config_module_type_import',
- ]
-
-
-def is_source_package(file_metadata):
- module_path = file_metadata['module_path']
- return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)
-
-
-def is_prebuilt_package(file_metadata):
- module_path = file_metadata['module_path']
- if module_path:
- return (module_path.startswith('prebuilts/') or
- is_soong_prebuilt_module(file_metadata) or
- file_metadata['is_prebuilt_make_module'])
-
- kernel_module_copy_files = file_metadata['kernel_module_copy_files']
- if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
- return True
-
- return False
-
-
-def get_source_package_info(file_metadata, metadata_file_path):
- if not metadata_file_path:
- return file_metadata['module_path'], []
-
- metadata_proto = metadata_file_protos[metadata_file_path]
- external_refs = []
- for tag in metadata_proto.third_party.security.tag:
- if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
- external_refs.append(f'{PACKAGE_EXTERNAL_REF}: SECURITY cpe23Type {tag.removeprefix(NVD_CPE23)}')
- elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
- external_refs.append(f'{PACKAGE_EXTERNAL_REF}: SECURITY cpe22Type {tag.removeprefix(NVD_CPE23)}')
-
- if metadata_proto.name:
- return metadata_proto.name, external_refs
- else:
- return os.path.basename(metadata_file_path), external_refs # return the directory name only as package name
-
-
-def get_prebuilt_package_name(file_metadata, metadata_file_path):
- name = None
- if metadata_file_path:
- metadata_proto = metadata_file_protos[metadata_file_path]
- if metadata_proto.name:
- name = metadata_proto.name
- else:
- name = metadata_file_path
- elif file_metadata['module_path']:
- name = file_metadata['module_path']
- elif file_metadata['kernel_module_copy_files']:
- src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
- name = os.path.dirname(src_path)
-
- return name.removeprefix('prebuilts/').replace('/', '-')
-
-
-def get_metadata_file_path(file_metadata):
- metadata_path = ''
- if file_metadata['module_path']:
- metadata_path = file_metadata['module_path']
- elif file_metadata['kernel_module_copy_files']:
- metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])
-
- while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
- metadata_path = os.path.dirname(metadata_path)
-
- return metadata_path
-
-
-def get_package_version(metadata_file_path):
- if not metadata_file_path:
- return None
- metadata_proto = metadata_file_protos[metadata_file_path]
- return metadata_proto.third_party.version
-
-
-def get_package_homepage(metadata_file_path):
- if not metadata_file_path:
- return None
- metadata_proto = metadata_file_protos[metadata_file_path]
- if metadata_proto.third_party.homepage:
- return metadata_proto.third_party.homepage
- for url in metadata_proto.third_party.url:
- if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
- return url.value
-
- return None
-
-
-def get_package_download_location(metadata_file_path):
- if not metadata_file_path:
- return None
- metadata_proto = metadata_file_protos[metadata_file_path]
- if metadata_proto.third_party.url:
- urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
- if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
- return urls[0].value
- elif len(urls) > 1:
- return urls[1].value
-
- return None
-
-
-def get_sbom_fragments(installed_file_metadata, metadata_file_path):
- external_doc_ref = None
- packages = []
- relationships = []
-
- # Info from METADATA file
- homepage = get_package_homepage(metadata_file_path)
- version = get_package_version(metadata_file_path)
- download_location = get_package_download_location(metadata_file_path)
-
- if is_source_package(installed_file_metadata):
- # Source fork packages
- name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
- source_package_id = new_package_id(name, PKG_SOURCE)
- source_package = new_package_record(source_package_id, name, args.build_version, args.product_mfr,
- external_refs=external_refs)
-
- upstream_package_id = new_package_id(name, PKG_UPSTREAM)
- upstream_package = new_package_record(upstream_package_id, name, version, homepage, download_location)
- packages += [source_package, upstream_package]
- relationships.append(new_relationship_record(source_package_id, REL_VARIANT_OF, upstream_package_id))
- elif is_prebuilt_package(installed_file_metadata):
- # Prebuilt fork packages
- name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
- prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
- prebuilt_package = new_package_record(prebuilt_package_id, name, args.build_version, args.product_mfr)
- packages.append(prebuilt_package)
-
- if metadata_file_path:
- metadata_proto = metadata_file_protos[metadata_file_path]
- if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
- sbom_url = metadata_proto.third_party.sbom_ref.url
- sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
- upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
- if sbom_url and sbom_checksum and upstream_element_id:
- external_doc_ref, doc_ref_id = new_external_doc_ref(name, sbom_url, sbom_checksum)
- relationships.append(
- new_relationship_record(prebuilt_package_id, REL_VARIANT_OF, doc_ref_id + ':' + upstream_element_id))
-
- return external_doc_ref, packages, relationships
-
-
-def generate_package_verification_code(files):
- checksums = [file[FILE_CHECKSUM] for file in files]
- checksums.sort()
- h = hashlib.sha1()
- h.update(''.join(checksums).encode(encoding='utf-8'))
- return h.hexdigest()
-
-
-def write_record(f, record):
- if record.__class__.__name__ == 'dict':
- for k, v in record.items():
- if k == EXTERNAL_DOCUMENT_REF or k == PACKAGE_EXTERNAL_REF:
- for ref in v:
- f.write(ref + '\n')
- else:
- f.write('{}: {}\n'.format(k, v))
- elif record.__class__.__name__ == 'str':
- f.write(record + '\n')
- f.write('\n')
-
-
-def write_tagvalue_sbom(all_records):
- with open(args.output_file, 'w', encoding="utf-8") as output_file:
- for rec in all_records:
- write_record(output_file, rec)
-
-
-def write_json_sbom(all_records, product_package_id):
- doc = {}
- product_package = None
- for r in all_records:
- if r.__class__.__name__ == 'dict':
- if DOCUMENT_NAME in r: # Doc header
- doc['spdxVersion'] = r[SPDX_VERSION]
- doc['dataLicense'] = r[DATA_LICENSE]
- doc[SPDXID] = r[SPDXID]
- doc['name'] = r[DOCUMENT_NAME]
- doc['documentNamespace'] = r[DOCUMENT_NAMESPACE]
- doc['creationInfo'] = {
- 'creators': [r[CREATOR]],
- 'created': r[CREATED],
- }
- doc['externalDocumentRefs'] = []
- for ref in r[EXTERNAL_DOCUMENT_REF]:
- # ref is 'ExternalDocumentRef: <doc id> <doc url> SHA1: xxxxx'
- fields = ref.split(' ')
- doc_ref = {
- 'externalDocumentId': fields[1],
- 'spdxDocument': fields[2],
- 'checksum': {
- 'algorithm': fields[3][:-1],
- 'checksumValue': fields[4]
- }
- }
- doc['externalDocumentRefs'].append(doc_ref)
- doc['documentDescribes'] = []
- doc['packages'] = []
- doc['files'] = []
- doc['relationships'] = []
-
- elif PACKAGE_NAME in r: # packages
- package = {
- 'name': r[PACKAGE_NAME],
- SPDXID: r[SPDXID],
- 'downloadLocation': r[PACKAGE_DOWNLOAD_LOCATION],
- 'filesAnalyzed': r[FILES_ANALYZED] == "true"
- }
- if PACKAGE_VERSION in r:
- package['versionInfo'] = r[PACKAGE_VERSION]
- if PACKAGE_SUPPLIER in r:
- package['supplier'] = r[PACKAGE_SUPPLIER]
- if PACKAGE_VERIFICATION_CODE in r:
- package['packageVerificationCode'] = {
- 'packageVerificationCodeValue': r[PACKAGE_VERIFICATION_CODE]
- }
- if PACKAGE_EXTERNAL_REF in r:
- package['externalRefs'] = []
- for ref in r[PACKAGE_EXTERNAL_REF]:
- # ref is 'ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4'
- fields = ref.split(' ')
- ext_ref = {
- 'referenceCategory': fields[1],
- 'referenceType': fields[2],
- 'referenceLocator': fields[3],
- }
- package['externalRefs'].append(ext_ref)
-
- doc['packages'].append(package)
- if r[SPDXID] == product_package_id:
- product_package = package
- product_package['hasFiles'] = []
-
- elif FILE_NAME in r: # files
- file = {
- 'fileName': r[FILE_NAME],
- SPDXID: r[SPDXID]
- }
- checksum = r[FILE_CHECKSUM].split(': ')
- file['checksums'] = [{
- 'algorithm': checksum[0],
- 'checksumValue': checksum[1],
- }]
- doc['files'].append(file)
- product_package['hasFiles'].append(r[SPDXID])
-
- elif r.__class__.__name__ == 'str':
- if r.startswith(RELATIONSHIP):
- # r is 'Relationship: <spdxid> <relationship> <spdxid>'
- fields = r.split(' ')
- rel = {
- 'spdxElementId': fields[1],
- 'relatedSpdxElement': fields[3],
- 'relationshipType': fields[2],
- }
- if fields[2] == REL_DESCRIBES:
- doc['documentDescribes'].append(fields[3])
- else:
- doc['relationships'].append(rel)
-
- with open(args.output_file + '.json', 'w', encoding="utf-8") as output_file:
- output_file.write(json.dumps(doc, indent=4))
-
-
-def save_report(report):
- prefix, _ = os.path.splitext(args.output_file)
- with open(prefix + '-gen-report.txt', 'w', encoding='utf-8') as report_file:
- for type, issues in report.items():
- report_file.write(type + '\n')
- for issue in issues:
- report_file.write('\t' + issue + '\n')
- report_file.write('\n')
-
-
-def sort_rels(rel):
- # rel = 'Relationship file_id GENERATED_FROM package_id'
- fields = rel.split(' ')
- return fields[3] + fields[1]
-
-
-# Validate the metadata generated by Make for installed files and report if there is no metadata.
-def installed_file_has_metadata(installed_file_metadata, report):
- installed_file = installed_file_metadata['installed_file']
- module_path = installed_file_metadata['module_path']
- product_copy_files = installed_file_metadata['product_copy_files']
- kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
- is_platform_generated = installed_file_metadata['is_platform_generated']
-
- if (not module_path and
- not product_copy_files and
- not kernel_module_copy_files and
- not is_platform_generated and
- not installed_file.endswith('.fsv_meta')):
- report[ISSUE_NO_METADATA].append(installed_file)
- return False
-
- return True
-
-
-def report_metadata_file(metadata_file_path, installed_file_metadata, report):
- if metadata_file_path:
- report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
- 'installed_file: {}, module_path: {}, METADATA file: {}'.format(
- installed_file_metadata['installed_file'],
- installed_file_metadata['module_path'],
- metadata_file_path + '/METADATA'))
-
- package_metadata = metadata_file_pb2.Metadata()
- with open(metadata_file_path + '/METADATA', 'rt') as f:
- text_format.Parse(f.read(), package_metadata)
-
- if not metadata_file_path in metadata_file_protos:
- metadata_file_protos[metadata_file_path] = package_metadata
- if not package_metadata.name:
- report[ISSUE_METADATA_FILE_INCOMPLETE].append(f'{metadata_file_path}/METADATA does not has "name"')
-
- if not package_metadata.third_party.version:
- report[ISSUE_METADATA_FILE_INCOMPLETE].append(
- f'{metadata_file_path}/METADATA does not has "third_party.version"')
-
- for tag in package_metadata.third_party.security.tag:
- if not tag.startswith(NVD_CPE23):
- report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
- f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
- else:
- report[ISSUE_NO_METADATA_FILE].append(
- "installed_file: {}, module_path: {}".format(
- installed_file_metadata['installed_file'], installed_file_metadata['module_path']))
-
-
-def generate_fragment():
- with open(args.metadata, newline='') as sbom_metadata_file:
- reader = csv.DictReader(sbom_metadata_file)
- for installed_file_metadata in reader:
- installed_file = installed_file_metadata['installed_file']
- if args.output_file != args.product_out_dir + installed_file + ".spdx":
- continue
-
- module_path = installed_file_metadata['module_path']
- package_id = new_package_id(encode_for_spdxid(module_path), PKG_PREBUILT)
- package = new_package_record(package_id, module_path, args.build_version, args.product_mfr)
- file_id = new_file_id(installed_file)
- file = new_file_record(file_id, installed_file, checksum(installed_file))
- relationship = new_relationship_record(file_id, REL_GENERATED_FROM, package_id)
- records = [package, file, relationship]
- write_tagvalue_sbom(records)
- break
-
-
-def main():
- global args
- args = get_args()
- log('Args:', vars(args))
-
- if args.unbundled:
- generate_fragment()
- return
-
- global metadata_file_protos
- metadata_file_protos = {}
-
- doc_id = 'SPDXRef-DOCUMENT'
- doc_header = new_doc_header(doc_id)
-
- product_package_id = 'SPDXRef-PRODUCT'
- product_package = new_package_record(product_package_id, 'PRODUCT', args.build_version, args.product_mfr,
- files_analyzed='true')
-
- platform_package_id = 'SPDXRef-PLATFORM'
- platform_package = new_package_record(platform_package_id, 'PLATFORM', args.build_version, args.product_mfr)
-
- # Report on some issues and information
- report = {
- ISSUE_NO_METADATA: [],
- ISSUE_NO_METADATA_FILE: [],
- ISSUE_METADATA_FILE_INCOMPLETE: [],
- ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
- ISSUE_INSTALLED_FILE_NOT_EXIST: [],
- INFO_METADATA_FOUND_FOR_PACKAGE: [],
- }
-
- # Scan the metadata in CSV file and create the corresponding package and file records in SPDX
- product_files = []
- package_ids = []
- package_records = []
- rels_file_gen_from = []
- with open(args.metadata, newline='') as sbom_metadata_file:
- reader = csv.DictReader(sbom_metadata_file)
- for installed_file_metadata in reader:
- installed_file = installed_file_metadata['installed_file']
- module_path = installed_file_metadata['module_path']
- product_copy_files = installed_file_metadata['product_copy_files']
- kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
-
- if not installed_file_has_metadata(installed_file_metadata, report):
- continue
- file_path = args.product_out_dir + '/' + installed_file
- if not (os.path.islink(file_path) or os.path.isfile(file_path)):
- report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
- continue
-
- file_id = new_file_id(installed_file)
- product_files.append(new_file_record(file_id, installed_file, checksum(installed_file)))
-
- if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
- metadata_file_path = get_metadata_file_path(installed_file_metadata)
- report_metadata_file(metadata_file_path, installed_file_metadata, report)
-
- # File from source fork packages or prebuilt fork packages
- external_doc_ref, pkgs, rels = get_sbom_fragments(installed_file_metadata, metadata_file_path)
- if len(pkgs) > 0:
- if external_doc_ref and external_doc_ref not in doc_header[EXTERNAL_DOCUMENT_REF]:
- doc_header[EXTERNAL_DOCUMENT_REF].append(external_doc_ref)
- for p in pkgs:
- if not p[SPDXID] in package_ids:
- package_ids.append(p[SPDXID])
- package_records.append(p)
- for rel in rels:
- if not rel in package_records:
- package_records.append(rel)
- fork_package_id = pkgs[0][SPDXID] # The first package should be the source/prebuilt fork package
- rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, fork_package_id))
- elif module_path or installed_file_metadata['is_platform_generated']:
- # File from PLATFORM package
- rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
- elif product_copy_files:
- # Format of product_copy_files: <source path>:<dest path>
- src_path = product_copy_files.split(':')[0]
- # So far product_copy_files are copied from directory system, kernel, hardware, frameworks and device,
- # so process them as files from PLATFORM package
- rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
- elif installed_file.endswith('.fsv_meta'):
- # See build/make/core/Makefile:2988
- rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
- elif kernel_module_copy_files.startswith('ANDROID-GEN'):
- # For the four files generated for _dlkm, _ramdisk partitions
- # See build/make/core/Makefile:323
- rels_file_gen_from.append(new_relationship_record(file_id, REL_GENERATED_FROM, platform_package_id))
-
- product_package[PACKAGE_VERIFICATION_CODE] = generate_package_verification_code(product_files)
-
- all_records = [
- doc_header,
- product_package,
- new_relationship_record(doc_id, REL_DESCRIBES, product_package_id),
- ]
- all_records += product_files
- all_records.append(platform_package)
- all_records += package_records
- rels_file_gen_from.sort(key=sort_rels)
- all_records += rels_file_gen_from
-
- # Save SBOM records to output file
- doc_header[CREATED] = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
- write_tagvalue_sbom(all_records)
- if args.json:
- write_json_sbom(all_records, product_package_id)
-
- save_report(report)
-
-
-if __name__ == '__main__':
- main()
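
For reference, the removed script serialized each record dict as SPDX 2.3 tag-value pairs, one "Key: value" line per field. A reconstructed example of its output for a prebuilt package (field names come from the constants above; the values are illustrative):

    record = {
        "PackageName": "libexample",
        "SPDXID": "SPDXRef-PREBUILT-libexample",
        "PackageDownloadLocation": "NONE",
        "FilesAnalyzed": "false",
        "PackageVersion": "1.2.3",
    }
    for key, value in record.items():
        print(f"{key}: {value}")  # e.g. "PackageName: libexample"
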
diff --git a/tools/rbcrun/Android.bp b/tools/rbcrun/Android.bp
index fcc33ef..4fab858 100644
--- a/tools/rbcrun/Android.bp
+++ b/tools/rbcrun/Android.bp
@@ -34,6 +34,7 @@
pkgPath: "rbcrun",
deps: [
"go-starlark-starlark",
+ "go-starlark-starlarkjson",
"go-starlark-starlarkstruct",
"go-starlark-starlarktest",
],
diff --git a/tools/rbcrun/host.go b/tools/rbcrun/host.go
index 32afa45..a0fb9e1 100644
--- a/tools/rbcrun/host.go
+++ b/tools/rbcrun/host.go
@@ -24,13 +24,19 @@
"strings"
"go.starlark.net/starlark"
+ "go.starlark.net/starlarkjson"
"go.starlark.net/starlarkstruct"
)
-const callerDirKey = "callerDir"
+type ExecutionMode int
+const (
+ ExecutionModeRbc ExecutionMode = iota
+ ExecutionModeMake ExecutionMode = iota
+)
-var LoadPathRoot = "."
-var shellPath string
+const callerDirKey = "callerDir"
+const shellKey = "shell"
+const executionModeKey = "executionMode"
type modentry struct {
globals starlark.StringDict
@@ -39,20 +45,66 @@
var moduleCache = make(map[string]*modentry)
-var builtins starlark.StringDict
+var rbcBuiltins starlark.StringDict = starlark.StringDict{
+ "struct": starlark.NewBuiltin("struct", starlarkstruct.Make),
+ // To convert find-copy-subdir and product-copy-files-by pattern
+ "rblf_find_files": starlark.NewBuiltin("rblf_find_files", find),
+ // To convert makefile's $(shell cmd)
+ "rblf_shell": starlark.NewBuiltin("rblf_shell", shell),
+ // Output to stderr
+ "rblf_log": starlark.NewBuiltin("rblf_log", log),
+ // To convert makefile's $(wildcard foo*)
+ "rblf_wildcard": starlark.NewBuiltin("rblf_wildcard", wildcard),
+}
-func moduleName2AbsPath(moduleName string, callerDir string) (string, error) {
- path := moduleName
- if ix := strings.LastIndex(path, ":"); ix >= 0 {
- path = path[0:ix] + string(os.PathSeparator) + path[ix+1:]
+var makeBuiltins starlark.StringDict = starlark.StringDict{
+ "struct": starlark.NewBuiltin("struct", starlarkstruct.Make),
+ "json": starlarkjson.Module,
+}
+
+// Takes a module name (the first argument to the load() function) and returns the path
+// it's trying to load, stripping out leading //, and handling leading :s.
+func cleanModuleName(moduleName string, callerDir string) (string, error) {
+ if strings.Count(moduleName, ":") > 1 {
+ return "", fmt.Errorf("at most 1 colon must be present in starlark path: %s", moduleName)
}
- if strings.HasPrefix(path, "//") {
- return filepath.Abs(filepath.Join(LoadPathRoot, path[2:]))
+
+ // We don't have full support for external repositories, but at least support skylib's dicts.
+ if moduleName == "@bazel_skylib//lib:dicts.bzl" {
+ return "external/bazel-skylib/lib/dicts.bzl", nil
+ }
+
+ localLoad := false
+ if strings.HasPrefix(moduleName, "@//") {
+ moduleName = moduleName[3:]
+ } else if strings.HasPrefix(moduleName, "//") {
+ moduleName = moduleName[2:]
} else if strings.HasPrefix(moduleName, ":") {
- return filepath.Abs(filepath.Join(callerDir, path[1:]))
+ moduleName = moduleName[1:]
+ localLoad = true
} else {
- return filepath.Abs(path)
+ return "", fmt.Errorf("load path must start with // or :")
}
+
+ if ix := strings.LastIndex(moduleName, ":"); ix >= 0 {
+ moduleName = moduleName[:ix] + string(os.PathSeparator) + moduleName[ix+1:]
+ }
+
+ if filepath.Clean(moduleName) != moduleName {
+ return "", fmt.Errorf("load path must be clean, found: %s, expected: %s", moduleName, filepath.Clean(moduleName))
+ }
+ if strings.HasPrefix(moduleName, "../") {
+ return "", fmt.Errorf("load path must not start with ../: %s", moduleName)
+ }
+ if strings.HasPrefix(moduleName, "/") {
+ return "", fmt.Errorf("load path starts with /, use // for a absolute path: %s", moduleName)
+ }
+
+ if localLoad {
+ return filepath.Join(callerDir, moduleName), nil
+ }
+
+ return moduleName, nil
}
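
cleanModuleName accepts only //-rooted, @//-rooted, or :-relative (caller-local) load paths, converts the optional :file suffix into a path separator, and rejects anything unclean or escaping the workspace. A Python rendering of the same rules (function name and error messages are illustrative):

    import os.path

    def clean_module_name(name, caller_dir):
        if name.count(":") > 1:
            raise ValueError("at most one colon allowed: " + name)
        if name == "@bazel_skylib//lib:dicts.bzl":
            # Special-cased external repository, as in the Go code.
            return "external/bazel-skylib/lib/dicts.bzl"
        local = False
        if name.startswith("@//"):
            name = name[3:]
        elif name.startswith("//"):
            name = name[2:]
        elif name.startswith(":"):
            name, local = name[1:], True
        else:
            raise ValueError("load path must start with // or :")
        name = name.replace(":", os.sep, 1)
        if os.path.normpath(name) != name or name.startswith(("../", "/")):
            raise ValueError("unclean or escaping load path: " + name)
        return os.path.join(caller_dir, name) if local else name
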
// loader implements load statement. The format of the loaded module URI is
@@ -61,14 +113,18 @@
// The presence of `|symbol` indicates that the loader should return a single 'symbol'
// bound to None if file is missing.
func loader(thread *starlark.Thread, module string) (starlark.StringDict, error) {
- pipePos := strings.LastIndex(module, "|")
- mustLoad := pipePos < 0
+ mode := thread.Local(executionModeKey).(ExecutionMode)
var defaultSymbol string
- if !mustLoad {
- defaultSymbol = module[pipePos+1:]
- module = module[:pipePos]
+ mustLoad := true
+ if mode == ExecutionModeRbc {
+ pipePos := strings.LastIndex(module, "|")
+ mustLoad = pipePos < 0
+ if !mustLoad {
+ defaultSymbol = module[pipePos+1:]
+ module = module[:pipePos]
+ }
}
- modulePath, err := moduleName2AbsPath(module, thread.Local(callerDirKey).(string))
+ modulePath, err := cleanModuleName(module, thread.Local(callerDirKey).(string))
if err != nil {
return nil, err
}
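
In rbc mode a load target may carry a |symbol suffix, meaning: if the file is missing, do not fail, just return that one symbol bound to None. The suffix parsing, sketched in Python:

    def split_module(module, rbc_mode):
        # "path|symbol" -> optional load with a default symbol;
        # plain "path" -> the load must succeed.
        if rbc_mode and "|" in module:
            module, _, default_symbol = module.rpartition("|")
            return module, default_symbol, False  # mustLoad = False
        return module, None, True
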
@@ -100,8 +156,17 @@
}
childThread.SetLocal(callerDirKey, filepath.Dir(modulePath))
- globals, err := starlark.ExecFile(childThread, modulePath, nil, builtins)
- e = &modentry{globals, err}
+ childThread.SetLocal(shellKey, thread.Local(shellKey))
+ childThread.SetLocal(executionModeKey, mode)
+ if mode == ExecutionModeRbc {
+ globals, err := starlark.ExecFile(childThread, modulePath, nil, rbcBuiltins)
+ e = &modentry{globals, err}
+ } else if mode == ExecutionModeMake {
+ globals, err := starlark.ExecFile(childThread, modulePath, nil, makeBuiltins)
+ e = &modentry{globals, err}
+ } else {
+ return nil, fmt.Errorf("unknown executionMode %d", mode)
+ }
} else {
e = &modentry{starlark.StringDict{defaultSymbol: starlark.None}, nil}
}
@@ -189,12 +254,13 @@
// its output the same way as Make's $(shell ) function. The end-of-lines
// ("\n" or "\r\n") are replaced with " " in the result, and the trailing
// end-of-line is removed.
-func shell(_ *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
+func shell(thread *starlark.Thread, b *starlark.Builtin, args starlark.Tuple,
kwargs []starlark.Tuple) (starlark.Value, error) {
var command string
if err := starlark.UnpackPositionalArgs(b.Name(), args, kwargs, 1, &command); err != nil {
return starlark.None, err
}
+ shellPath := thread.Local(shellKey).(string)
if shellPath == "" {
return starlark.None,
fmt.Errorf("cannot run shell, /bin/sh is missing (running on Windows?)")
@@ -223,16 +289,6 @@
return starlark.NewList(elems)
}
-// propsetFromEnv constructs a propset from the array of KEY=value strings
-func structFromEnv(env []string) *starlarkstruct.Struct {
- sd := make(map[string]starlark.Value, len(env))
- for _, x := range env {
- kv := strings.SplitN(x, "=", 2)
- sd[kv[0]] = starlark.String(kv[1])
- }
- return starlarkstruct.FromStringDict(starlarkstruct.Default, sd)
-}
-
func log(thread *starlark.Thread, fn *starlark.Builtin, args starlark.Tuple, kwargs []starlark.Tuple) (starlark.Value, error) {
sep := " "
if err := starlark.UnpackArgs("print", nil, kwargs, "sep?", &sep); err != nil {
@@ -255,50 +311,68 @@
return starlark.None, nil
}
-func setup(env []string) {
- // Create the symbols that aid makefile conversion. See README.md
- builtins = starlark.StringDict{
- "struct": starlark.NewBuiltin("struct", starlarkstruct.Make),
- "rblf_cli": structFromEnv(env),
- "rblf_env": structFromEnv(os.Environ()),
- // To convert find-copy-subdir and product-copy-files-by pattern
- "rblf_find_files": starlark.NewBuiltin("rblf_find_files", find),
- // To convert makefile's $(shell cmd)
- "rblf_shell": starlark.NewBuiltin("rblf_shell", shell),
- // Output to stderr
- "rblf_log": starlark.NewBuiltin("rblf_log", log),
- // To convert makefile's $(wildcard foo*)
- "rblf_wildcard": starlark.NewBuiltin("rblf_wildcard", wildcard),
- }
-
- // NOTE(asmundak): OS-specific. Behave similar to Linux `system` call,
- // which always uses /bin/sh to run the command
- shellPath = "/bin/sh"
- if _, err := os.Stat(shellPath); err != nil {
- shellPath = ""
- }
-}
-
// Parses, resolves, and executes a Starlark file.
// filename and src parameters are as for starlark.ExecFile:
// * filename is the name of the file to execute,
// and the name that appears in error messages;
// * src is an optional source of bytes to use instead of filename
// (it can be a string, or a byte array, or an io.Reader instance)
-// * commandVars is an array of "VAR=value" items. They are accessible from
-// the starlark script as members of the `rblf_cli` propset.
-func Run(filename string, src interface{}, commandVars []string) error {
- setup(commandVars)
+// Returns the top-level starlark variables, the list of starlark files loaded, and an error
+func Run(filename string, src interface{}, mode ExecutionMode) (starlark.StringDict, []string, error) {
+ // NOTE(asmundak): OS-specific. Behave similar to Linux `system` call,
+ // which always uses /bin/sh to run the command
+ shellPath := "/bin/sh"
+ if _, err := os.Stat(shellPath); err != nil {
+ shellPath = ""
+ }
mainThread := &starlark.Thread{
Name: "main",
- Print: func(_ *starlark.Thread, msg string) { fmt.Println(msg) },
+ Print: func(_ *starlark.Thread, msg string) {
+ if mode == ExecutionModeRbc {
+ // In rbc mode, rblf_log is used to print to stderr
+ fmt.Println(msg)
+ } else if mode == ExecutionModeMake {
+ fmt.Fprintln(os.Stderr, msg)
+ }
+ },
Load: loader,
}
- absPath, err := filepath.Abs(filename)
- if err == nil {
- mainThread.SetLocal(callerDirKey, filepath.Dir(absPath))
- _, err = starlark.ExecFile(mainThread, absPath, src, builtins)
+ filename, err := filepath.Abs(filename)
+ if err != nil {
+ return nil, nil, err
}
- return err
+ if wd, err := os.Getwd(); err == nil {
+ filename, err = filepath.Rel(wd, filename)
+ if err != nil {
+ return nil, nil, err
+ }
+ if strings.HasPrefix(filename, "../") {
+ return nil, nil, fmt.Errorf("path could not be made relative to workspace root: %s", filename)
+ }
+ } else {
+ return nil, nil, err
+ }
+
+ // Add top-level file to cache for cycle detection purposes
+ moduleCache[filename] = nil
+
+ var results starlark.StringDict
+ mainThread.SetLocal(callerDirKey, filepath.Dir(filename))
+ mainThread.SetLocal(shellKey, shellPath)
+ mainThread.SetLocal(executionModeKey, mode)
+ if mode == ExecutionModeRbc {
+ results, err = starlark.ExecFile(mainThread, filename, src, rbcBuiltins)
+ } else if mode == ExecutionModeMake {
+ results, err = starlark.ExecFile(mainThread, filename, src, makeBuiltins)
+ } else {
+ return results, nil, fmt.Errorf("unknown executionMode %d", mode)
+ }
+ loadedStarlarkFiles := make([]string, 0, len(moduleCache))
+ for file := range moduleCache {
+ loadedStarlarkFiles = append(loadedStarlarkFiles, file)
+ }
+ sort.Strings(loadedStarlarkFiles)
+
+ return results, loadedStarlarkFiles, err
}
diff --git a/tools/rbcrun/host_test.go b/tools/rbcrun/host_test.go
index 97f6ce9..10cac62 100644
--- a/tools/rbcrun/host_test.go
+++ b/tools/rbcrun/host_test.go
@@ -53,8 +53,7 @@
}
// Common setup for the tests: create thread, change to the test directory
-func testSetup(t *testing.T, env []string) *starlark.Thread {
- setup(env)
+func testSetup(t *testing.T) *starlark.Thread {
thread := &starlark.Thread{
Load: func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
if module == "assert.star" {
@@ -72,14 +71,15 @@
func dataDir() string {
_, thisSrcFile, _, _ := runtime.Caller(0)
return filepath.Join(filepath.Dir(thisSrcFile), "testdata")
-
}
func exerciseStarlarkTestFile(t *testing.T, starFile string) {
// In order to use "assert.star" from go/starlark.net/starlarktest in the tests, provide:
// * load function that handles "assert.star"
// * starlarktest.DataFile function that finds its location
- setup(nil)
+ if err := os.Chdir(dataDir()); err != nil {
+ t.Fatal(err)
+ }
thread := &starlark.Thread{
Load: func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
if module == "assert.star" {
@@ -90,21 +90,9 @@
starlarktest.SetReporter(thread, t)
_, thisSrcFile, _, _ := runtime.Caller(0)
filename := filepath.Join(filepath.Dir(thisSrcFile), starFile)
- if _, err := starlark.ExecFile(thread, filename, nil, builtins); err != nil {
- if err, ok := err.(*starlark.EvalError); ok {
- t.Fatal(err.Backtrace())
- }
- t.Fatal(err)
- }
-}
-
-func TestCliAndEnv(t *testing.T) {
- // TODO(asmundak): convert this to use exerciseStarlarkTestFile
- if err := os.Setenv("TEST_ENVIRONMENT_FOO", "test_environment_foo"); err != nil {
- t.Fatal(err)
- }
- thread := testSetup(t, []string{"CLI_FOO=foo"})
- if _, err := starlark.ExecFile(thread, "cli_and_env.star", nil, builtins); err != nil {
+ thread.SetLocal(executionModeKey, ExecutionModeRbc)
+ thread.SetLocal(shellKey, "/bin/sh")
+ if _, err := starlark.ExecFile(thread, filename, nil, rbcBuiltins); err != nil {
if err, ok := err.(*starlark.EvalError); ok {
t.Fatal(err.Backtrace())
}
@@ -114,11 +102,8 @@
func TestFileOps(t *testing.T) {
// TODO(asmundak): convert this to use exerciseStarlarkTestFile
- if err := os.Setenv("TEST_DATA_DIR", dataDir()); err != nil {
- t.Fatal(err)
- }
- thread := testSetup(t, nil)
- if _, err := starlark.ExecFile(thread, "file_ops.star", nil, builtins); err != nil {
+ thread := testSetup(t)
+ if _, err := starlark.ExecFile(thread, "file_ops.star", nil, rbcBuiltins); err != nil {
if err, ok := err.(*starlark.EvalError); ok {
t.Fatal(err.Backtrace())
}
@@ -128,7 +113,7 @@
func TestLoad(t *testing.T) {
// TODO(asmundak): convert this to use exerciseStarlarkTestFile
- thread := testSetup(t, nil)
+ thread := testSetup(t)
thread.Load = func(thread *starlark.Thread, module string) (starlark.StringDict, error) {
if module == "assert.star" {
return starlarktest.LoadAssertModule()
@@ -137,9 +122,12 @@
}
}
dir := dataDir()
+ if err := os.Chdir(filepath.Dir(dir)); err != nil {
+ t.Fatal(err)
+ }
thread.SetLocal(callerDirKey, dir)
- LoadPathRoot = filepath.Dir(dir)
- if _, err := starlark.ExecFile(thread, "load.star", nil, builtins); err != nil {
+ thread.SetLocal(executionModeKey, ExecutionModeRbc)
+ if _, err := starlark.ExecFile(thread, "testdata/load.star", nil, rbcBuiltins); err != nil {
if err, ok := err.(*starlark.EvalError); ok {
t.Fatal(err.Backtrace())
}
@@ -148,8 +136,5 @@
}
func TestShell(t *testing.T) {
- if err := os.Setenv("TEST_DATA_DIR", dataDir()); err != nil {
- t.Fatal(err)
- }
exerciseStarlarkTestFile(t, "testdata/shell.star")
}
diff --git a/tools/rbcrun/rbcrun/rbcrun.go b/tools/rbcrun/rbcrun/rbcrun.go
index 4db6a0b..b5182f0 100644
--- a/tools/rbcrun/rbcrun/rbcrun.go
+++ b/tools/rbcrun/rbcrun/rbcrun.go
@@ -17,52 +17,137 @@
import (
"flag"
"fmt"
- "go.starlark.net/starlark"
"os"
"rbcrun"
+ "regexp"
"strings"
+
+ "go.starlark.net/starlark"
)
var (
- execprog = flag.String("c", "", "execute program `prog`")
+ modeFlag = flag.String("mode", "", "the general behavior of rbcrun. Can be \"rbc\" or \"make\". Required.")
rootdir = flag.String("d", ".", "the value of // for load paths")
- file = flag.String("f", "", "file to execute")
perfFile = flag.String("perf", "", "save performance data")
+ identifierRe = regexp.MustCompile("[a-zA-Z_][a-zA-Z0-9_]*")
)
-func main() {
- flag.Parse()
- filename := *file
- var src interface{}
- var env []string
+func getEntrypointStarlarkFile() string {
+ filename := ""
- rc := 0
for _, arg := range flag.Args() {
- if strings.Contains(arg, "=") {
- env = append(env, arg)
- } else if filename == "" {
+ if filename == "" {
filename = arg
} else {
quit("only one file can be executed\n")
}
}
- if *execprog != "" {
- if filename != "" {
- quit("either -c or file name should be present\n")
- }
- filename = "<cmdline>"
- src = *execprog
- }
if filename == "" {
- if len(env) > 0 {
- fmt.Fprintln(os.Stderr,
- "no file to run -- if your file's name contains '=', use -f to specify it")
- }
flag.Usage()
os.Exit(1)
}
- if stat, err := os.Stat(*rootdir); os.IsNotExist(err) || !stat.IsDir() {
- quit("%s is not a directory\n", *rootdir)
+ return filename
+}
+
+func getMode() rbcrun.ExecutionMode {
+ switch *modeFlag {
+ case "rbc":
+ return rbcrun.ExecutionModeRbc
+ case "make":
+ return rbcrun.ExecutionModeMake
+ case "":
+ quit("-mode flag is required.")
+ default:
+ quit("Unknown -mode value %q, expected 1 of \"rbc\", \"make\"", *modeFlag)
+ }
+ return rbcrun.ExecutionModeMake
+}
+
+var makeStringReplacer = strings.NewReplacer("#", "\\#", "$", "$$")
+
+func cleanStringForMake(s string) (string, error) {
+ if strings.ContainsAny(s, "\\\n") {
+ // \\ in make is literally \\, not a single \, so we can't allow them.
+ // \<newline> in make will produce a space, not a newline.
+ return "", fmt.Errorf("starlark strings exported to make cannot contain backslashes or newlines")
+ }
+ return makeStringReplacer.Replace(s), nil
+}
+
+func getValueInMakeFormat(value starlark.Value, allowLists bool) (string, error) {
+ switch v := value.(type) {
+ case starlark.String:
+ if cleanedValue, err := cleanStringForMake(v.GoString()); err == nil {
+ return cleanedValue, nil
+ } else {
+ return "", err
+ }
+ case starlark.Int:
+ return v.String(), nil
+ case *starlark.List:
+ if !allowLists {
+ return "", fmt.Errorf("nested lists are not allowed to be exported from starlark to make, flatten the list in starlark first")
+ }
+ result := ""
+ for i := 0; i < v.Len(); i++ {
+ value, err := getValueInMakeFormat(v.Index(i), false)
+ if err != nil {
+ return "", err
+ }
+ if i > 0 {
+ result += " "
+ }
+ result += value
+ }
+ return result, nil
+ default:
+ return "", fmt.Errorf("only starlark strings, ints, and lists of strings/ints can be exported to make. Please convert all other types in starlark first. Found type: %s", value.Type())
+ }
+}
+
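
Exporting a string to Make means escaping # (starts a comment) as \# and $ (starts a variable reference) as $$, while backslashes and newlines are rejected outright because Make would mangle both. The same rules in Python:

    def clean_string_for_make(s):
        if "\\" in s or "\n" in s:
            raise ValueError(
                "strings exported to make cannot contain backslashes or newlines")
        return s.replace("#", "\\#").replace("$", "$$")

    assert clean_string_for_make("a#b $(X)") == "a\\#b $$(X)"
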
+func printVarsInMakeFormat(globals starlark.StringDict) error {
+ // We could just directly export top level variables by name instead of going through
+ // a variables_to_export_to_make dictionary, but that wouldn't allow for exporting a
+ // runtime-defined number of variables to make. This can be important because dictionaries
+ // in make are often represented by a unique variable for every key in the dictionary.
+ variablesValue, ok := globals["variables_to_export_to_make"]
+ if !ok {
+ return fmt.Errorf("expected top-level starlark file to have a \"variables_to_export_to_make\" variable")
+ }
+ variables, ok := variablesValue.(*starlark.Dict)
+ if !ok {
+ return fmt.Errorf("expected variables_to_export_to_make to be a dict, got %s", variablesValue.Type())
+ }
+
+ for _, varTuple := range variables.Items() {
+ varNameStarlark, ok := varTuple.Index(0).(starlark.String)
+ if !ok {
+ return fmt.Errorf("all keys in variables_to_export_to_make must be strings, but got %q", varTuple.Index(0).Type())
+ }
+ varName := varNameStarlark.GoString()
+ if !identifierRe.MatchString(varName) {
+ return fmt.Errorf("all variables at the top level starlark file must be valid c identifiers, but got %q", varName)
+ }
+ if varName == "LOADED_STARLARK_FILES" {
+ return fmt.Errorf("the name LOADED_STARLARK_FILES is reserved for use by the starlark interpreter")
+ }
+ valueMake, err := getValueInMakeFormat(varTuple.Index(1), true)
+ if err != nil {
+ return err
+ }
+ // The :=$= is special Kati syntax that means "set and make readonly"
+ fmt.Printf("%s :=$= %s\n", varName, valueMake)
+ }
+ return nil
+}
+
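
Concretely, a top-level Starlark entrypoint is expected to expose a variables_to_export_to_make dict, and each entry becomes one read-only Kati assignment on stdout. An illustrative entrypoint (Starlark, which is syntactically a Python subset):

    # illustrative entrypoint: running it in make mode would emit
    #   FOO :=$= bar
    #   LIST :=$= a b
    variables_to_export_to_make = {
        "FOO": "bar",
        "LIST": ["a", "b"],
    }
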
+func main() {
+ flag.Parse()
+ filename := getEntrypointStarlarkFile()
+ mode := getMode()
+
+ if os.Chdir(*rootdir) != nil {
+ quit("could not chdir to %s\n", *rootdir)
}
if *perfFile != "" {
pprof, err := os.Create(*perfFile)
@@ -74,8 +159,8 @@
quit("%s\n", err)
}
}
- rbcrun.LoadPathRoot = *rootdir
- err := rbcrun.Run(filename, src, env)
+ variables, loadedStarlarkFiles, err := rbcrun.Run(filename, nil, mode)
+ rc := 0
if *perfFile != "" {
if err2 := starlark.StopProfile(); err2 != nil {
fmt.Fprintln(os.Stderr, err2)
@@ -89,6 +174,12 @@
quit("%s\n", err)
}
}
+ if mode == rbcrun.ExecutionModeMake {
+ if err := printVarsInMakeFormat(variables); err != nil {
+ quit("%s\n", err)
+ }
+ fmt.Printf("LOADED_STARLARK_FILES := %s\n", strings.Join(loadedStarlarkFiles, " "))
+ }
os.Exit(rc)
}
diff --git a/tools/rbcrun/testdata/cli_and_env.star b/tools/rbcrun/testdata/cli_and_env.star
deleted file mode 100644
index d6f464a..0000000
--- a/tools/rbcrun/testdata/cli_and_env.star
+++ /dev/null
@@ -1,11 +0,0 @@
-# Tests rblf_env access
-load("assert.star", "assert")
-
-
-def test():
- assert.eq(rblf_env.TEST_ENVIRONMENT_FOO, "test_environment_foo")
- assert.fails(lambda: rblf_env.FOO_BAR_BAZ, ".*struct has no .FOO_BAR_BAZ attribute$")
- assert.eq(rblf_cli.CLI_FOO, "foo")
-
-
-test()
diff --git a/tools/rbcrun/testdata/file_ops.star b/tools/rbcrun/testdata/file_ops.star
index 2ee78fc..b2b907c 100644
--- a/tools/rbcrun/testdata/file_ops.star
+++ b/tools/rbcrun/testdata/file_ops.star
@@ -1,22 +1,21 @@
# Tests file ops builtins
load("assert.star", "assert")
-
def test():
myname = "file_ops.star"
files = rblf_wildcard("*.star")
assert.true(myname in files, "expected %s in %s" % (myname, files))
- files = rblf_wildcard("*.star", rblf_env.TEST_DATA_DIR)
+ files = rblf_wildcard("*.star")
assert.true(myname in files, "expected %s in %s" % (myname, files))
files = rblf_wildcard("*.xxx")
assert.true(len(files) == 0, "expansion should be empty but contains %s" % files)
mydir = "testdata"
myrelname = "%s/%s" % (mydir, myname)
- files = rblf_find_files(rblf_env.TEST_DATA_DIR + "/../", "*")
+ files = rblf_find_files("../", "*")
assert.true(mydir in files and myrelname in files, "expected %s and %s in %s" % (mydir, myrelname, files))
- files = rblf_find_files(rblf_env.TEST_DATA_DIR + "/../", "*", only_files=1)
+ files = rblf_find_files("../", "*", only_files=1)
assert.true(mydir not in files, "did not expect %s in %s" % (mydir, files))
assert.true(myrelname in files, "expected %s in %s" % (myrelname, files))
- files = rblf_find_files(rblf_env.TEST_DATA_DIR + "/../", "*.star")
+ files = rblf_find_files("../", "*.star")
assert.true(myrelname in files, "expected %s in %s" % (myrelname, files))
test()
diff --git a/tools/rbcrun/testdata/module1.star b/tools/rbcrun/testdata/module1.star
index be04f75..02919a0 100644
--- a/tools/rbcrun/testdata/module1.star
+++ b/tools/rbcrun/testdata/module1.star
@@ -2,6 +2,6 @@
load("assert.star", "assert")
# Make sure that builtins are defined for the loaded module, too
-assert.true(rblf_wildcard("module1.star"))
-assert.true(not rblf_wildcard("no_such file"))
+assert.true(rblf_wildcard("testdata/module1.star"))
+assert.true(not rblf_wildcard("testdata/no_such file"))
test = "module1"
diff --git a/tools/rbcrun/testdata/shell.star b/tools/rbcrun/testdata/shell.star
index ad10697..dd17375 100644
--- a/tools/rbcrun/testdata/shell.star
+++ b/tools/rbcrun/testdata/shell.star
@@ -1,5 +1,5 @@
# Tests "queue" data type
load("assert.star", "assert")
-assert.eq("load.star shell.star", rblf_shell("cd %s && ls -1 shell.star load.star 2>&1" % rblf_env.TEST_DATA_DIR))
-assert.eq("shell.star", rblf_shell("cd %s && echo shell.sta*" % rblf_env.TEST_DATA_DIR))
+assert.eq("load.star shell.star", rblf_shell("ls -1 shell.star load.star 2>&1"))
+assert.eq("shell.star", rblf_shell("echo shell.sta*"))
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index e154a0f..8d660f8 100644
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -65,7 +65,7 @@
import ota_metadata_pb2
import rangelib
import sparse_img
-
+from concurrent.futures import ThreadPoolExecutor
from apex_utils import GetApexInfoFromTargetFiles
from common import ZipDelete, PARTITIONS_WITH_CARE_MAP, ExternalError, RunAndCheckOutput, IsSparseImage, MakeTempFile, ZipWrite
@@ -818,6 +818,9 @@
"""Create a super_empty.img and store it in output_zip."""
img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "super_empty.img")
+ if os.path.exists(img.name):
+ logger.info("super_empty.img already exists; no need to rebuild...")
+ return
build_super_image.BuildSuperImage(OPTIONS.info_dict, img.name)
img.Write()
@@ -842,13 +845,14 @@
SYSTEM/ after rebuilding recovery.
"""
common.ZipDelete(zip_filename, files_list)
- with zipfile.ZipFile(zip_filename, "a",
+ output_zip = zipfile.ZipFile(zip_filename, "a",
compression=zipfile.ZIP_DEFLATED,
- allowZip64=True) as output_zip:
- for item in files_list:
- file_path = os.path.join(OPTIONS.input_tmp, item)
- assert os.path.exists(file_path)
- common.ZipWrite(output_zip, file_path, arcname=item)
+ allowZip64=True)
+ for item in files_list:
+ file_path = os.path.join(OPTIONS.input_tmp, item)
+ assert os.path.exists(file_path)
+ common.ZipWrite(output_zip, file_path, arcname=item)
+ common.ZipClose(output_zip)
def HasPartition(partition_name):
@@ -1079,8 +1083,15 @@
("system_dlkm", has_system_dlkm, AddSystemDlkm, []),
("system_other", has_system_other, AddSystemOther, []),
)
- for call in add_partition_calls:
- add_partition(*call)
+ # If output_zip exists, each of the add_partition_calls writes to the same
+ # output_zip, which is not thread-safe, so run them serially in that case.
+ if output_zip:
+ for call in add_partition_calls:
+ add_partition(*call)
+ else:
+ with ThreadPoolExecutor(max_workers=len(add_partition_calls)) as executor:
+ for future in [executor.submit(add_partition, *call) for call in add_partition_calls]:
+ future.result()
AddApexInfo(output_zip)
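
The dispatch above is worth spelling out: the per-partition builds are independent, so they can fan out to a thread pool, but when they all append to one output zip they must run serially because ZipFile writes are not thread-safe. A reduced sketch of that pattern (add_partition and calls stand in for the real ones):

    from concurrent.futures import ThreadPoolExecutor

    def run_partition_calls(add_partition, calls, output_zip):
        if output_zip:  # shared ZipFile: serialize the writes
            for call in calls:
                add_partition(*call)
        else:  # independent outputs: build images in parallel
            with ThreadPoolExecutor(max_workers=len(calls)) as executor:
                futures = [executor.submit(add_partition, *c) for c in calls]
                for future in futures:
                    future.result()  # re-raise any worker exception
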
@@ -1191,7 +1202,7 @@
AddVbmetaDigest(output_zip)
if output_zip:
- output_zip.close()
+ common.ZipClose(output_zip)
if OPTIONS.replace_updated_files_list:
ReplaceUpdatedFiles(output_zip.filename,
OPTIONS.replace_updated_files_list)
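The hunk above parallelizes the independent add_partition() calls with a ThreadPoolExecutor, but only when no shared output_zip is open, since concurrent writes to a single zipfile.ZipFile are not thread-safe. A minimal, self-contained sketch of the same dispatch pattern (run_tasks and the shared_resource flag are illustrative names, not part of this change):

    from concurrent.futures import ThreadPoolExecutor

    def run_tasks(tasks, shared_resource=None):
        # Writers to one shared resource (e.g. an open zip) must not interleave.
        if shared_resource is not None:
            for func, args in tasks:
                func(*args)
            return
        with ThreadPoolExecutor(max_workers=len(tasks)) as executor:
            futures = [executor.submit(func, *args) for func, args in tasks]
            for future in futures:
                future.result()  # blocks and re-raises any worker exception

    run_tasks([(print, ("adding system",)), (print, ("adding vendor",))])

Calling future.result() on every future mirrors the code above: it waits for completion and surfaces worker exceptions instead of silently dropping them.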
diff --git a/tools/releasetools/apex_utils.py b/tools/releasetools/apex_utils.py
index 40f7c92..59c712e 100644
--- a/tools/releasetools/apex_utils.py
+++ b/tools/releasetools/apex_utils.py
@@ -431,7 +431,7 @@
apex_zip = zipfile.ZipFile(apex_file, 'a', allowZip64=True)
common.ZipWrite(apex_zip, payload_file, arcname=APEX_PAYLOAD_IMAGE)
common.ZipWrite(apex_zip, payload_public_key, arcname=APEX_PUBKEY)
- apex_zip.close()
+ common.ZipClose(apex_zip)
# 3. Sign the APEX container with container_key.
signed_apex = common.MakeTempFile(prefix='apex-container-', suffix='.apex')
@@ -626,7 +626,7 @@
if os.path.isfile(deapexer_path):
deapexer = deapexer_path
- for apex_filename in os.listdir(target_dir):
+ for apex_filename in sorted(os.listdir(target_dir)):
apex_filepath = os.path.join(target_dir, apex_filename)
if not os.path.isfile(apex_filepath) or \
not zipfile.is_zipfile(apex_filepath):
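os.listdir() returns entries in arbitrary, filesystem-dependent order, so wrapping it in sorted() above makes the APEX processing order (and any resulting logs or errors) reproducible across machines. A small sketch of the guarantee, using a hypothetical temp directory:

    import os
    import tempfile

    target_dir = tempfile.mkdtemp()  # hypothetical stand-in for the APEX dir
    for name in ("b.apex", "a.apex"):
        open(os.path.join(target_dir, name), "w").close()

    # os.listdir() order is unspecified; sorted() pins it down.
    assert sorted(os.listdir(target_dir)) == ["a.apex", "b.apex"]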
diff --git a/tools/releasetools/check_ota_package_signature.py b/tools/releasetools/check_ota_package_signature.py
index 97957be..b395c19 100755
--- a/tools/releasetools/check_ota_package_signature.py
+++ b/tools/releasetools/check_ota_package_signature.py
@@ -142,7 +142,7 @@
"""Verifies the payload and metadata signatures in an A/B OTA payload."""
package_zip = zipfile.ZipFile(package, 'r', allowZip64=True)
if 'payload.bin' not in package_zip.namelist():
- package_zip.close()
+ common.ZipClose(package_zip)
return
print('Verifying A/B OTA payload signatures...')
@@ -160,7 +160,7 @@
'--in_file=' + payload_file,
'--public_key=' + pubkey]
common.RunAndCheckOutput(cmd)
- package_zip.close()
+ common.ZipClose(package_zip)
# Verified successfully upon reaching here.
print('\nPayload signatures VERIFIED\n\n')
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 3904a78..06de622 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -450,10 +450,7 @@
@property
def is_vabc(self):
- vendor_prop = self.info_dict.get("vendor.build.prop")
- vabc_enabled = vendor_prop and \
- vendor_prop.GetProp("ro.virtual_ab.compression.enabled") == "true"
- return vabc_enabled
+ return self.info_dict.get("virtual_ab_compression") == "true"
@property
def is_android_r(self):
@@ -461,6 +458,25 @@
return system_prop and system_prop.GetProp("ro.build.version.release") == "11"
@property
+ def vendor_api_level(self):
+ vendor_prop = self.info_dict.get("vendor.build.prop")
+ if not vendor_prop:
+ return -1
+
+ props = [
+ "ro.board.api_level",
+ "ro.board.first_api_level",
+ "ro.product.first_api_level",
+ ]
+ for prop in props:
+ value = vendor_prop.GetProp(prop)
+ try:
+ return int(value)
+ except (ValueError, TypeError):
+ pass
+ return -1
+
+ @property
def is_vabc_xor(self):
vendor_prop = self.info_dict.get("vendor.build.prop")
vabc_xor_enabled = vendor_prop and \
@@ -698,26 +714,73 @@
script.AssertOemProperty(prop, values, oem_no_mount)
-def ReadFromInputFile(input_file, fn):
- """Reads the contents of fn from input zipfile or directory."""
+def DoesInputFileContain(input_file, fn):
+ """Check whether the input target_files.zip contain an entry `fn`"""
if isinstance(input_file, zipfile.ZipFile):
- return input_file.read(fn).decode()
+ return fn in input_file.namelist()
elif zipfile.is_zipfile(input_file):
with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
- return zfp.read(fn).decode()
+ return fn in zfp.namelist()
+ else:
+ if not os.path.isdir(input_file):
+ raise ValueError(
+ "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
+ path = os.path.join(input_file, *fn.split("/"))
+ return os.path.exists(path)
+
+
+def ReadBytesFromInputFile(input_file, fn):
+ """Reads the bytes of fn from input zipfile or directory."""
+ if isinstance(input_file, zipfile.ZipFile):
+ return input_file.read(fn)
+ elif zipfile.is_zipfile(input_file):
+ with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
+ return zfp.read(fn)
else:
if not os.path.isdir(input_file):
raise ValueError(
"Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
path = os.path.join(input_file, *fn.split("/"))
try:
- with open(path) as f:
+ with open(path, "rb") as f:
return f.read()
except IOError as e:
if e.errno == errno.ENOENT:
raise KeyError(fn)
+def ReadFromInputFile(input_file, fn):
+ """Reads the str contents of fn from input zipfile or directory."""
+ return ReadBytesFromInputFile(input_file, fn).decode()
+
+
+def WriteBytesToInputFile(input_file, fn, data):
+ """Write bytes |data| contents to fn of input zipfile or directory."""
+ if isinstance(input_file, zipfile.ZipFile):
+ with input_file.open(fn, "w") as entry_fp:
+ return entry_fp.write(data)
+ elif zipfile.is_zipfile(input_file):
+ with zipfile.ZipFile(input_file, "r", allowZip64=True) as zfp:
+ with zfp.open(fn, "w") as entry_fp:
+ return entry_fp.write(data)
+ else:
+ if not os.path.isdir(input_file):
+ raise ValueError(
+ "Invalid input_file, accepted inputs are ZipFile object, path to .zip file on disk, or path to extracted directory. Actual: " + input_file)
+ path = os.path.join(input_file, *fn.split("/"))
+ try:
+ with open(path, "wb") as f:
+ return f.write(data)
+ except IOError as e:
+ if e.errno == errno.ENOENT:
+ raise KeyError(fn)
+
+
+def WriteToInputFile(input_file, fn, data: str):
+ """Writes the string |data| to fn in the input zipfile or directory."""
+ return WriteBytesToInputFile(input_file, fn, data.encode())
+
+
def ExtractFromInputFile(input_file, fn):
"""Extracts the contents of fn from input zipfile or directory into a file."""
if isinstance(input_file, zipfile.ZipFile):
@@ -1357,11 +1420,8 @@
def AppendAVBSigningArgs(cmd, partition):
"""Append signing arguments for avbtool."""
# e.g., "--key path/to/signing_key --algorithm SHA256_RSA4096"
- key_path = OPTIONS.info_dict.get("avb_" + partition + "_key_path")
- if key_path and not os.path.exists(key_path) and OPTIONS.search_path:
- new_key_path = os.path.join(OPTIONS.search_path, key_path)
- if os.path.exists(new_key_path):
- key_path = new_key_path
+ key_path = ResolveAVBSigningPathArgs(
+ OPTIONS.info_dict.get("avb_" + partition + "_key_path"))
algorithm = OPTIONS.info_dict.get("avb_" + partition + "_algorithm")
if key_path and algorithm:
cmd.extend(["--key", key_path, "--algorithm", algorithm])
@@ -1371,6 +1431,32 @@
cmd.extend(["--salt", avb_salt])
+def ResolveAVBSigningPathArgs(split_args):
+
+ def ResolveBinaryPath(path):
+ if os.path.exists(path):
+ return path
+ new_path = os.path.join(OPTIONS.search_path, path)
+ if os.path.exists(new_path):
+ return new_path
+ raise ExternalError(
+ "Failed to find {}".format(new_path))
+
+ if not split_args:
+ return split_args
+
+ if isinstance(split_args, list):
+ for index, arg in enumerate(split_args[:-1]):
+ if arg == '--signing_helper':
+ signing_helper_path = split_args[index + 1]
+ split_args[index + 1] = ResolveBinaryPath(signing_helper_path)
+ break
+ elif isinstance(split_args, str):
+ split_args = ResolveBinaryPath(split_args)
+
+ return split_args
+
+
def GetAvbPartitionArg(partition, image, info_dict=None):
"""Returns the VBMeta arguments for partition.
@@ -1423,10 +1509,7 @@
"""
if key is None:
key = info_dict["avb_" + partition + "_key_path"]
- if key and not os.path.exists(key) and OPTIONS.search_path:
- new_key_path = os.path.join(OPTIONS.search_path, key)
- if os.path.exists(new_key_path):
- key = new_key_path
+ key = ResolveAVBSigningPathArgs(key)
pubkey_path = ExtractAvbPublicKey(info_dict["avb_avbtool"], key)
rollback_index_location = info_dict[
"avb_" + partition + "_rollback_index_location"]
@@ -1442,10 +1525,7 @@
key_path = OPTIONS.info_dict.get("gki_signing_key_path")
algorithm = OPTIONS.info_dict.get("gki_signing_algorithm")
- if not os.path.exists(key_path) and OPTIONS.search_path:
- new_key_path = os.path.join(OPTIONS.search_path, key_path)
- if os.path.exists(new_key_path):
- key_path = new_key_path
+ key_path = ResolveAVBSigningPathArgs(key_path)
# Checks key_path exists, before processing --gki_signing_* args.
if not os.path.exists(key_path):
@@ -1505,7 +1585,8 @@
custom_partitions = OPTIONS.info_dict.get(
"avb_custom_images_partition_list", "").strip().split()
- custom_avb_partitions = ["vbmeta_" + part for part in OPTIONS.info_dict.get("avb_custom_vbmeta_images_partition_list", "").strip().split()]
+ custom_avb_partitions = ["vbmeta_" + part for part in OPTIONS.info_dict.get(
+ "avb_custom_vbmeta_images_partition_list", "").strip().split()]
for partition, path in partitions.items():
if partition not in needed_partitions:
@@ -1541,6 +1622,8 @@
found = True
break
assert found, 'Failed to find {}'.format(chained_image)
+
+ split_args = ResolveAVBSigningPathArgs(split_args)
cmd.extend(split_args)
RunAndCheckOutput(cmd)
@@ -1751,7 +1834,8 @@
AppendAVBSigningArgs(cmd, partition_name)
args = info_dict.get("avb_" + partition_name + "_add_hash_footer_args")
if args and args.strip():
- cmd.extend(shlex.split(args))
+ split_args = ResolveAVBSigningPathArgs(shlex.split(args))
+ cmd.extend(split_args)
RunAndCheckOutput(cmd)
img.seek(os.SEEK_SET, 0)
@@ -1792,7 +1876,8 @@
AppendAVBSigningArgs(cmd, partition_name)
args = info_dict.get("avb_" + partition_name + "_add_hash_footer_args")
if args and args.strip():
- cmd.extend(shlex.split(args))
+ split_args = ResolveAVBSigningPathArgs(shlex.split(args))
+ cmd.extend(split_args)
RunAndCheckOutput(cmd)
@@ -1867,7 +1952,7 @@
data = _BuildBootableImage(prebuilt_name, os.path.join(unpack_dir, tree_subdir),
os.path.join(unpack_dir, fs_config),
os.path.join(unpack_dir, 'META/ramdisk_node_list')
- if dev_nodes else None,
+ if dev_nodes else None,
info_dict, has_ramdisk, two_step_image)
if data:
return File(name, data)
@@ -1972,7 +2057,8 @@
AppendAVBSigningArgs(cmd, partition_name)
args = info_dict.get(f'avb_{partition_name}_add_hash_footer_args')
if args and args.strip():
- cmd.extend(shlex.split(args))
+ split_args = ResolveAVBSigningPathArgs(shlex.split(args))
+ cmd.extend(split_args)
RunAndCheckOutput(cmd)
img.seek(os.SEEK_SET, 0)
@@ -2809,6 +2895,18 @@
def ZipWrite(zip_file, filename, arcname=None, perms=0o644,
compress_type=None):
+ # http://b/18015246
+ # Python 2.7's zipfile implementation wrongly thinks that zip64 is required
+ # for files larger than 2GiB. We can work around this by adjusting their
+ # limit. Note that `zipfile.writestr()` will not work for strings larger than
+ # 2GiB. The Python interpreter sometimes rejects strings that large (though
+ # it isn't clear to me exactly what circumstances cause this).
+ # `zipfile.write()` must be used directly to work around this.
+ #
+ # This mess can be avoided if we port to python3.
+ saved_zip64_limit = zipfile.ZIP64_LIMIT
+ zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
if compress_type is None:
compress_type = zip_file.compression
if arcname is None:
@@ -2834,13 +2932,14 @@
finally:
os.chmod(filename, saved_stat.st_mode)
os.utime(filename, (saved_stat.st_atime, saved_stat.st_mtime))
+ zipfile.ZIP64_LIMIT = saved_zip64_limit
def ZipWriteStr(zip_file, zinfo_or_arcname, data, perms=None,
compress_type=None):
"""Wrap zipfile.writestr() function to work around the zip64 limit.
- Python's zip implementation won't allow writing a string
+ Even with the ZIP64_LIMIT workaround, it won't allow writing a string
longer than 2GiB. It gives 'OverflowError: size does not fit in an int'
when calling crc32(bytes).
@@ -2849,6 +2948,9 @@
when we know the string won't be too long.
"""
+ saved_zip64_limit = zipfile.ZIP64_LIMIT
+ zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
zinfo.compress_type = zip_file.compression
@@ -2881,6 +2983,7 @@
zinfo.date_time = (2009, 1, 1, 0, 0, 0)
zip_file.writestr(zinfo, data)
+ zipfile.ZIP64_LIMIT = saved_zip64_limit
def ZipDelete(zip_filename, entries, force=False):
@@ -2909,10 +3012,21 @@
cmd.append(entry)
RunAndCheckOutput(cmd)
-
os.replace(new_zipfile, zip_filename)
+def ZipClose(zip_file):
+ # http://b/18015246
+ # zipfile also refers to ZIP64_LIMIT during close() when it writes out the
+ # central directory.
+ saved_zip64_limit = zipfile.ZIP64_LIMIT
+ zipfile.ZIP64_LIMIT = (1 << 32) - 1
+
+ zip_file.close()
+
+ zipfile.ZIP64_LIMIT = saved_zip64_limit
+
+
class DeviceSpecificParams(object):
module = None
@@ -4002,6 +4116,7 @@
# https://source.android.com/devices/bootloader/images
return fp.read(4) == b'\x3A\xFF\x26\xED'
+
def ParseUpdateEngineConfig(path: str):
"""Parse the update_engine config stored in file `path`
Args
@@ -4023,4 +4138,4 @@
if not minor:
raise ValueError(
f"{path} is an invalid update_engine config, missing PAYLOAD_MINOR_VERSION {data}")
- return (int(major.group(1)), int(minor.group(1)))
\ No newline at end of file
+ return (int(major.group(1)), int(minor.group(1)))
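The new DoesInputFileContain()/ReadBytesFromInputFile()/WriteBytesToInputFile() helpers let the rest of the tooling treat an open ZipFile, a path to a .zip, or an extracted directory uniformly. A minimal usage sketch against an extracted directory, assuming the releasetools common module patched above is importable (the path is illustrative):

    import common  # the releasetools module patched above

    input_file = "/tmp/extracted_target_files"  # hypothetical extracted dir
    if common.DoesInputFileContain(input_file, "META/misc_info.txt"):
        text = common.ReadFromInputFile(input_file, "META/misc_info.txt")
        # Rewrite the entry in place; directories support true replacement.
        common.WriteToInputFile(input_file, "META/misc_info.txt", text)

One caveat worth noting: in the ZipFile branches, ZipFile.open(fn, "w") appends a new entry rather than replacing an existing one (Python warns about the duplicate name), so extracted directories are the intended fast path for read-modify-write flows like the partial-update code later in this change.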
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index f8bdd81..7c27ef7 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -118,6 +118,7 @@
entries = [
'OTA/android-info.txt:android-info.txt',
+ 'META/fastboot-info.txt:fastboot-info.txt',
]
with zipfile.ZipFile(input_file) as input_zip:
namelist = input_zip.namelist()
diff --git a/tools/releasetools/merge/merge_target_files.py b/tools/releasetools/merge/merge_target_files.py
index 8f93688..ba2b14f 100755
--- a/tools/releasetools/merge/merge_target_files.py
+++ b/tools/releasetools/merge/merge_target_files.py
@@ -165,6 +165,19 @@
pass
+def include_meta_in_list(item_list):
+ """Include all `META/*` files in the item list.
+
+ To ensure that `AddImagesToTargetFiles` can still be used with vendor item
+ lists that do not specify all of the required META/ files, those files should
+ be included by default. This preserves the backward compatibility of
+ `rebuild_image_with_sepolicy`.
+ """
+ if not item_list:
+ return None
+ return list(item_list) + ['META/*']
+
+
def create_merged_package(temp_dir):
"""Merges two target files packages into one target files structure.
@@ -276,7 +289,7 @@
merge_utils.CollectTargetFiles(
input_zipfile_or_dir=OPTIONS.vendor_target_files,
output_dir=vendor_target_files_dir,
- item_list=OPTIONS.vendor_item_list)
+ item_list=include_meta_in_list(OPTIONS.vendor_item_list))
# Copy the partition contents from the merged target-files archive to the
# vendor target-files archive.
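Since include_meta_in_list() is a pure function, its contract is easy to pin down with a few illustrative calls:

    assert include_meta_in_list(None) is None
    assert include_meta_in_list([]) is None  # empty lists are falsy and pass through
    assert include_meta_in_list(['VENDOR/*']) == ['VENDOR/*', 'META/*']

Returning None for an empty or missing item list presumably keeps CollectTargetFiles() on its no-filter default when no vendor item list was given.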
diff --git a/tools/releasetools/merge/merge_utils.py b/tools/releasetools/merge/merge_utils.py
index c284338..b5683a8 100644
--- a/tools/releasetools/merge/merge_utils.py
+++ b/tools/releasetools/merge/merge_utils.py
@@ -181,6 +181,7 @@
_PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/.*$')
_IMAGE_PARTITION_PATTERN = re.compile(r'^IMAGES/(.*)\.img$')
+_PREBUILT_IMAGE_PARTITION_PATTERN = re.compile(r'^PREBUILT_IMAGES/(.*)\.img$')
def ItemListToPartitionSet(item_list):
@@ -203,12 +204,12 @@
partition_set = set()
for item in item_list:
- for pattern in (_PARTITION_ITEM_PATTERN, _IMAGE_PARTITION_PATTERN):
+ for pattern in (_PARTITION_ITEM_PATTERN, _IMAGE_PARTITION_PATTERN, _PREBUILT_IMAGE_PARTITION_PATTERN):
partition_match = pattern.search(item.strip())
if partition_match:
partition = partition_match.group(1).lower()
# These directories in target-files are not actual partitions.
- if partition not in ('meta', 'images'):
+ if partition not in ('meta', 'images', 'prebuilt_images'):
partition_set.add(partition)
return partition_set
@@ -217,7 +218,7 @@
# Partitions that are grabbed from the framework partial build by default.
_FRAMEWORK_PARTITIONS = {
'system', 'product', 'system_ext', 'system_other', 'root', 'system_dlkm',
- 'vbmeta_system'
+ 'vbmeta_system', 'pvmfw'
}
@@ -253,7 +254,7 @@
if partition == 'meta':
continue
- if partition == 'images':
+ if partition in ('images', 'prebuilt_images'):
image_partition, extension = os.path.splitext(os.path.basename(namelist))
if image_partition == 'vbmeta':
# Always regenerate vbmeta.img since it depends on hash information
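With _PREBUILT_IMAGE_PARTITION_PATTERN added, ItemListToPartitionSet() derives partition names from three entry shapes. A minimal sketch of that matching logic in isolation (pattern definitions copied from above; the sample item list is illustrative):

    import re

    _PARTITION_ITEM_PATTERN = re.compile(r'^([A-Z_]+)/.*$')
    _IMAGE_PARTITION_PATTERN = re.compile(r'^IMAGES/(.*)\.img$')
    _PREBUILT_IMAGE_PARTITION_PATTERN = re.compile(r'^PREBUILT_IMAGES/(.*)\.img$')

    def partition_set(item_list):
        found = set()
        for item in item_list:
            for pattern in (_PARTITION_ITEM_PATTERN, _IMAGE_PARTITION_PATTERN,
                            _PREBUILT_IMAGE_PARTITION_PATTERN):
                match = pattern.search(item.strip())
                if match:
                    partition = match.group(1).lower()
                    if partition not in ('meta', 'images', 'prebuilt_images'):
                        found.add(partition)
        return found

    assert partition_set(['VENDOR/build.prop', 'PREBUILT_IMAGES/pvmfw.img']) == \
        {'vendor', 'pvmfw'}

Note that 'PREBUILT_IMAGES/pvmfw.img' matches both the generic partition pattern (yielding the excluded 'prebuilt_images') and the new image pattern (yielding 'pvmfw'), which is why 'prebuilt_images' joins the exclusion tuple.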
diff --git a/tools/releasetools/merge_ota.py b/tools/releasetools/merge_ota.py
index 7d3d3a3..441312c 100644
--- a/tools/releasetools/merge_ota.py
+++ b/tools/releasetools/merge_ota.py
@@ -14,6 +14,7 @@
import argparse
import logging
+import shlex
import struct
import sys
import update_payload
@@ -34,6 +35,7 @@
logger = logging.getLogger(__name__)
CARE_MAP_ENTRY = "care_map.pb"
+APEX_INFO_ENTRY = "apex_info.pb"
def WriteDataBlob(payload: Payload, outfp: BinaryIO, read_size=1024*64):
@@ -188,6 +190,22 @@
f"OTA {partition_to_ota[part].name} and {payload.name} have duplicating partition {part}")
partition_to_ota[part] = payload
+def ApexInfo(file_paths):
+ if len(file_paths) > 1:
+ logger.info("More than one target file specified, will ignore "
+ "apex_info.pb (if any)")
+ return None
+ with zipfile.ZipFile(file_paths[0], "r", allowZip64=True) as zfp:
+ if APEX_INFO_ENTRY in zfp.namelist():
+ apex_info_bytes = zfp.read(APEX_INFO_ENTRY)
+ return apex_info_bytes
+ return None
+
+def ParseSignerArgs(args):
+ if args is None:
+ return None
+ return shlex.split(args)
+
def main(argv):
parser = argparse.ArgumentParser(description='Merge multiple partial OTAs')
parser.add_argument('packages', type=str, nargs='+',
@@ -196,6 +214,13 @@
help='Paths to private key for signing payload')
parser.add_argument('--search_path', type=str,
help='Search path for framework/signapk.jar')
+ parser.add_argument('--payload_signer', type=str,
+ help='Path to custom payload signer')
+ parser.add_argument('--payload_signer_args', type=ParseSignerArgs,
+ help='Arguments for payload signer if necessary')
+ parser.add_argument('--payload_signer_maximum_signature_size', type=str,
+ help='Maximum signature size (in bytes) that would be '
+ 'generated by the given payload signer')
parser.add_argument('--output', type=str,
help='Paths to output merged ota', required=True)
parser.add_argument('--metadata_ota', type=str,
@@ -203,6 +228,9 @@
parser.add_argument('--private_key_suffix', type=str,
help='Suffix to be appended to package_key path', default=".pk8")
parser.add_argument('-v', action="store_true", help="Enable verbose logging", dest="verbose")
+ parser.epilog = ('This tool can also be used to resign a regular OTA. For a single regular OTA, '
+ 'apex_info.pb will be written to output. When merging multiple OTAs, '
+ 'apex_info.pb will not be written.')
args = parser.parse_args(argv[1:])
file_paths = args.packages
@@ -225,6 +253,13 @@
merged_manifest = MergeManifests(payloads)
+ # Get signing keys
+ key_passwords = common.GetKeyPasswords([args.package_key])
+
+ generator = PayloadGenerator()
+
+ apex_info_bytes = ApexInfo(file_paths)
+
with tempfile.NamedTemporaryFile() as unsigned_payload:
WriteHeaderAndManifest(merged_manifest, unsigned_payload)
ConcatBlobs(payloads, unsigned_payload)
@@ -236,20 +271,31 @@
if args.package_key:
logger.info("Signing payload...")
- signer = PayloadSigner(args.package_key, args.private_key_suffix)
+ # TODO: remove OPTIONS when no longer used as fallback in payload_signer
+ common.OPTIONS.payload_signer_args = None
+ common.OPTIONS.payload_signer_maximum_signature_size = None
+ signer = PayloadSigner(args.package_key, args.private_key_suffix,
+ key_passwords[args.package_key],
+ payload_signer=args.payload_signer,
+ payload_signer_args=args.payload_signer_args,
+ payload_signer_maximum_signature_size=args.payload_signer_maximum_signature_size)
generator.payload_file = unsigned_payload.name
generator.Sign(signer)
logger.info("Payload size: %d", os.path.getsize(generator.payload_file))
logger.info("Writing to %s", args.output)
+
key_passwords = common.GetKeyPasswords([args.package_key])
with tempfile.NamedTemporaryFile(prefix="signed_ota", suffix=".zip") as signed_ota:
with zipfile.ZipFile(signed_ota, "w") as zfp:
generator.WriteToZip(zfp)
care_map_bytes = MergeCareMap(args.packages)
if care_map_bytes:
- zfp.writestr(CARE_MAP_ENTRY, care_map_bytes)
+ common.ZipWriteStr(zfp, CARE_MAP_ENTRY, care_map_bytes)
+ if apex_info_bytes:
+ logger.info("Writing %s", APEX_INFO_ENTRY)
+ common.ZipWriteStr(zfp, APEX_INFO_ENTRY, apex_info_bytes)
AddOtaMetadata(signed_ota.name, metadata_ota,
args.output, args.package_key, key_passwords[args.package_key])
return 0
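ParseSignerArgs() above exists so argparse can accept the whole --payload_signer_args value as one string yet hand PayloadSigner a token list with shell quoting respected. A couple of illustrative calls:

    import shlex

    assert shlex.split("--key /path/key.pem --extra 'a b'") == \
        ["--key", "/path/key.pem", "--extra", "a b"]
    # ParseSignerArgs(None) returns None, so an omitted flag stays None
    # and PayloadSigner falls back to common.OPTIONS.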
diff --git a/tools/releasetools/non_ab_ota.py b/tools/releasetools/non_ab_ota.py
index 7078d67..667891c 100644
--- a/tools/releasetools/non_ab_ota.py
+++ b/tools/releasetools/non_ab_ota.py
@@ -23,6 +23,7 @@
from check_target_files_vintf import CheckVintfIfTrebleEnabled, HasPartition
from common import OPTIONS
from ota_utils import UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata, PropertyFiles
+import subprocess
logger = logging.getLogger(__name__)
@@ -272,12 +273,13 @@
# We haven't written the metadata entry, which will be done in
# FinalizeMetadata.
- output_zip.close()
+ common.ZipClose(output_zip)
needed_property_files = (
NonAbOtaPropertyFiles(),
)
- FinalizeMetadata(metadata, staging_file, output_file, needed_property_files, package_key=OPTIONS.package_key)
+ FinalizeMetadata(metadata, staging_file, output_file,
+ needed_property_files, package_key=OPTIONS.package_key)
def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_file):
@@ -526,13 +528,14 @@
# We haven't written the metadata entry yet, which will be handled in
# FinalizeMetadata().
- output_zip.close()
+ common.ZipClose(output_zip)
# Sign the generated zip package unless no_signing is specified.
needed_property_files = (
NonAbOtaPropertyFiles(),
)
- FinalizeMetadata(metadata, staging_file, output_file, needed_property_files, package_key=OPTIONS.package_key)
+ FinalizeMetadata(metadata, staging_file, output_file,
+ needed_property_files, package_key=OPTIONS.package_key)
def GenerateNonAbOtaPackage(target_file, output_file, source_file=None):
@@ -555,8 +558,18 @@
if OPTIONS.extracted_input is not None:
OPTIONS.input_tmp = OPTIONS.extracted_input
else:
- logger.info("unzipping target target-files...")
- OPTIONS.input_tmp = common.UnzipTemp(target_file, UNZIP_PATTERN)
+ if not os.path.isdir(target_file):
+ logger.info("unzipping target target-files...")
+ OPTIONS.input_tmp = common.UnzipTemp(target_file, UNZIP_PATTERN)
+ else:
+ OPTIONS.input_tmp = target_file
+ tmpfile = common.MakeTempFile(suffix=".zip")
+ os.unlink(tmpfile)
+ common.RunAndCheckOutput(
+ ["zip", tmpfile, "-r", ".", "-0"], cwd=target_file)
+ assert zipfile.is_zipfile(tmpfile)
+ target_file = tmpfile
+
OPTIONS.target_tmp = OPTIONS.input_tmp
# If the caller explicitly specified the device-specific extensions path via
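When target_file is already an extracted directory, the hunk above repacks it with the zip CLI using -0 (store, no compression), since the archive is a short-lived staging artifact and compression time would be wasted. A sketch of an equivalent repack using only the standard library (paths are illustrative; this is not the code path the change takes):

    import os
    import zipfile

    def zip_dir_stored(src_dir, out_zip):
        # ZIP_STORED mirrors zip's -0: pack without compressing.
        with zipfile.ZipFile(out_zip, "w", zipfile.ZIP_STORED,
                             allowZip64=True) as zfp:
            for dirpath, _, filenames in os.walk(src_dir):
                for name in filenames:
                    path = os.path.join(dirpath, name)
                    zfp.write(path, arcname=os.path.relpath(path, src_dir))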
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index 2458244..afbe81a 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -248,6 +248,9 @@
--security_patch_level
Override the security patch level in target files
+
+ --max_threads
+ Specify max number of threads allowed when generating A/B OTA
"""
from __future__ import print_function
@@ -267,10 +270,9 @@
import common
import ota_utils
from ota_utils import (UNZIP_PATTERN, FinalizeMetadata, GetPackageMetadata,
- PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME)
-from common import IsSparseImage
+ PayloadGenerator, SECURITY_PATCH_LEVEL_PROP_NAME, ExtractTargetFiles, CopyTargetFilesDir)
+from common import DoesInputFileContain, IsSparseImage
import target_files_diff
-from check_target_files_vintf import CheckVintfIfTrebleEnabled
from non_ab_ota import GenerateNonAbOtaPackage
from payload_signer import PayloadSigner
@@ -321,6 +323,8 @@
OPTIONS.enable_lz4diff = False
OPTIONS.vabc_compression_param = None
OPTIONS.security_patch_level = None
+OPTIONS.max_threads = None
+
POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
DYNAMIC_PARTITION_INFO = 'META/dynamic_partitions_info.txt'
@@ -495,7 +499,7 @@
else:
common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
- target_zip.close()
+ common.ZipClose(target_zip)
return target_file
@@ -514,20 +518,14 @@
Returns:
The filename of target-files.zip that doesn't contain postinstall config.
"""
- # We should only make a copy if postinstall_config entry exists.
- with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
- if POSTINSTALL_CONFIG not in input_zip.namelist():
- return input_file
-
- target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
- shutil.copyfile(input_file, target_file)
- common.ZipDelete(target_file, POSTINSTALL_CONFIG)
- return target_file
+ config_path = os.path.join(input_file, POSTINSTALL_CONFIG)
+ if os.path.exists(config_path):
+ os.unlink(config_path)
+ return input_file
def ParseInfoDict(target_file_path):
- with zipfile.ZipFile(target_file_path, 'r', allowZip64=True) as zfp:
- return common.LoadInfoDict(zfp)
+ return common.LoadInfoDict(target_file_path)
def GetTargetFilesZipForCustomVABCCompression(input_file, vabc_compression_param):
@@ -539,6 +537,17 @@
Returns:
The path to modified target-files.zip
"""
+ if os.path.isdir(input_file):
+ dynamic_partition_info_path = os.path.join(
+ input_file, "META", "dynamic_partitions_info.txt")
+ with open(dynamic_partition_info_path, "r") as fp:
+ dynamic_partition_info = fp.read()
+ dynamic_partition_info = ModifyVABCCompressionParam(
+ dynamic_partition_info, vabc_compression_param)
+ with open(dynamic_partition_info_path, "w") as fp:
+ fp.write(dynamic_partition_info)
+ return input_file
+
target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
shutil.copyfile(input_file, target_file)
common.ZipDelete(target_file, DYNAMIC_PARTITION_INFO)
@@ -566,23 +575,7 @@
The filename of target-files.zip used for partial ota update.
"""
- def AddImageForPartition(partition_name):
- """Add the archive name for a given partition to the copy list."""
- for prefix in ['IMAGES', 'RADIO']:
- image_path = '{}/{}.img'.format(prefix, partition_name)
- if image_path in namelist:
- copy_entries.append(image_path)
- map_path = '{}/{}.map'.format(prefix, partition_name)
- if map_path in namelist:
- copy_entries.append(map_path)
- return
-
- raise ValueError("Cannot find {} in input zipfile".format(partition_name))
-
- with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
- original_ab_partitions = input_zip.read(
- AB_PARTITIONS).decode().splitlines()
- namelist = input_zip.namelist()
+ original_ab_partitions = common.ReadFromInputFile(input_file, AB_PARTITIONS)
unrecognized_partitions = [partition for partition in ab_partitions if
partition not in original_ab_partitions]
@@ -591,50 +584,65 @@
unrecognized_partitions)
logger.info("Generating partial updates for %s", ab_partitions)
+ for subdir in ["IMAGES", "RADIO", "PREBUILT_IMAGES"]:
+ image_dir = os.path.join(input_file, subdir)
+ if not os.path.exists(image_dir):
+ continue
+ for filename in os.listdir(image_dir):
+ filepath = os.path.join(image_dir, filename)
+ if filename.endswith(".img"):
+ partition_name = filename.removesuffix(".img")
+ if partition_name not in ab_partitions:
+ os.unlink(filepath)
- copy_entries = ['META/update_engine_config.txt']
- for partition_name in ab_partitions:
- AddImageForPartition(partition_name)
+ common.WriteToInputFile(input_file, 'META/ab_partitions.txt',
+ '\n'.join(ab_partitions))
+ CARE_MAP_ENTRY = "META/care_map.pb"
+ if DoesInputFileContain(input_file, CARE_MAP_ENTRY):
+ caremap = care_map_pb2.CareMap()
+ caremap.ParseFromString(
+ common.ReadBytesFromInputFile(input_file, CARE_MAP_ENTRY))
+ filtered = [
+ part for part in caremap.partitions if part.name in ab_partitions]
+ del caremap.partitions[:]
+ caremap.partitions.extend(filtered)
+ common.WriteBytesToInputFile(input_file, CARE_MAP_ENTRY,
+ caremap.SerializeToString())
- # Use zip2zip to avoid extracting the zipfile.
- partial_target_file = common.MakeTempFile(suffix='.zip')
- cmd = ['zip2zip', '-i', input_file, '-o', partial_target_file]
- cmd.extend(['{}:{}'.format(name, name) for name in copy_entries])
- common.RunAndCheckOutput(cmd)
+ for info_file in ['META/misc_info.txt', DYNAMIC_PARTITION_INFO]:
+ if not DoesInputFileContain(input_file, info_file):
+ logger.warning('Cannot find %s in input zipfile', info_file)
+ continue
- partial_target_zip = zipfile.ZipFile(partial_target_file, 'a',
- allowZip64=True)
- with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
- common.ZipWriteStr(partial_target_zip, 'META/ab_partitions.txt',
- '\n'.join(ab_partitions))
- CARE_MAP_ENTRY = "META/care_map.pb"
- if CARE_MAP_ENTRY in input_zip.namelist():
- caremap = care_map_pb2.CareMap()
- caremap.ParseFromString(input_zip.read(CARE_MAP_ENTRY))
- filtered = [
- part for part in caremap.partitions if part.name in ab_partitions]
- del caremap.partitions[:]
- caremap.partitions.extend(filtered)
- common.ZipWriteStr(partial_target_zip, CARE_MAP_ENTRY,
- caremap.SerializeToString())
+ content = common.ReadFromInputFile(input_file, info_file)
+ modified_info = UpdatesInfoForSpecialUpdates(
+ content, lambda p: p in ab_partitions)
+ if OPTIONS.vabc_compression_param and info_file == DYNAMIC_PARTITION_INFO:
+ modified_info = ModifyVABCCompressionParam(
+ modified_info, OPTIONS.vabc_compression_param)
+ common.WriteToInputFile(input_file, info_file, modified_info)
- for info_file in ['META/misc_info.txt', DYNAMIC_PARTITION_INFO]:
- if info_file not in input_zip.namelist():
- logger.warning('Cannot find %s in input zipfile', info_file)
- continue
- content = input_zip.read(info_file).decode()
- modified_info = UpdatesInfoForSpecialUpdates(
- content, lambda p: p in ab_partitions)
- if OPTIONS.vabc_compression_param and info_file == DYNAMIC_PARTITION_INFO:
- modified_info = ModifyVABCCompressionParam(
- modified_info, OPTIONS.vabc_compression_param)
- common.ZipWriteStr(partial_target_zip, info_file, modified_info)
+ def IsInPartialList(postinstall_line: str):
+ idx = postinstall_line.find("=")
+ if idx < 0:
+ return False
+ key = postinstall_line[:idx]
+ logger.info("%s %s", key, ab_partitions)
+ for part in ab_partitions:
+ if key.endswith("_" + part):
+ return True
+ return False
- # TODO(xunchang) handle META/postinstall_config.txt'
+ postinstall_config = common.ReadFromInputFile(input_file, POSTINSTALL_CONFIG)
+ postinstall_config = [
+ line for line in postinstall_config.splitlines() if IsInPartialList(line)]
+ if postinstall_config:
+ postinstall_config = "\n".join(postinstall_config)
+ common.WriteToInputFile(input_file, POSTINSTALL_CONFIG, postinstall_config)
+ else:
+ os.unlink(os.path.join(input_file, POSTINSTALL_CONFIG))
- partial_target_zip.close()
-
- return partial_target_file
+ return input_file
def GetTargetFilesZipForRetrofitDynamicPartitions(input_file,
@@ -659,21 +667,12 @@
replace = {'OTA/super_{}.img'.format(dev): 'IMAGES/{}.img'.format(dev)
for dev in super_block_devices}
- target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
- shutil.copyfile(input_file, target_file)
-
- with zipfile.ZipFile(input_file, allowZip64=True) as input_zip:
- namelist = input_zip.namelist()
-
- input_tmp = common.UnzipTemp(input_file, RETROFIT_DAP_UNZIP_PATTERN)
-
# Remove partitions from META/ab_partitions.txt that is in
# dynamic_partition_list but not in super_block_devices so that
# brillo_update_payload won't generate update for those logical partitions.
- ab_partitions_file = os.path.join(input_tmp, *AB_PARTITIONS.split('/'))
- with open(ab_partitions_file) as f:
- ab_partitions_lines = f.readlines()
- ab_partitions = [line.strip() for line in ab_partitions_lines]
+ ab_partitions_lines = common.ReadFromInputFile(
+ input_file, AB_PARTITIONS).split("\n")
+ ab_partitions = [line.strip() for line in ab_partitions_lines]
# Assert that all super_block_devices are in ab_partitions
super_device_not_updated = [partition for partition in super_block_devices
if partition not in ab_partitions]
@@ -681,15 +680,6 @@
"{} is in super_block_devices but not in {}".format(
super_device_not_updated, AB_PARTITIONS)
# ab_partitions -= (dynamic_partition_list - super_block_devices)
- new_ab_partitions = common.MakeTempFile(
- prefix="ab_partitions", suffix=".txt")
- with open(new_ab_partitions, 'w') as f:
- for partition in ab_partitions:
- if (partition in dynamic_partition_list and
- partition not in super_block_devices):
- logger.info("Dropping %s from ab_partitions.txt", partition)
- continue
- f.write(partition + "\n")
to_delete = [AB_PARTITIONS]
# Always skip postinstall for a retrofit update.
@@ -702,24 +692,28 @@
# Remove the existing partition images as well as the map files.
to_delete += list(replace.values())
to_delete += ['IMAGES/{}.map'.format(dev) for dev in super_block_devices]
-
- common.ZipDelete(target_file, to_delete)
-
- target_zip = zipfile.ZipFile(target_file, 'a', allowZip64=True)
+ for item in to_delete:
+ os.unlink(os.path.join(input_file, item))
# Write super_{foo}.img as {foo}.img.
for src, dst in replace.items():
- assert src in namelist, \
+ assert DoesInputFileContain(input_file, src), \
'Missing {} in {}; {} cannot be written'.format(src, input_file, dst)
- unzipped_file = os.path.join(input_tmp, *src.split('/'))
- common.ZipWrite(target_zip, unzipped_file, arcname=dst)
+ source_path = os.path.join(input_file, *src.split("/"))
+ target_path = os.path.join(input_file, *dst.split("/"))
+ os.rename(source_path, target_path)
# Write new ab_partitions.txt file
- common.ZipWrite(target_zip, new_ab_partitions, arcname=AB_PARTITIONS)
+ new_ab_partitions = os.path.join(input_file, AB_PARTITIONS)
+ with open(new_ab_partitions, 'w') as f:
+ for partition in ab_partitions:
+ if (partition in dynamic_partition_list and
+ partition not in super_block_devices):
+ logger.info("Dropping %s from ab_partitions.txt", partition)
+ continue
+ f.write(partition + "\n")
- target_zip.close()
-
- return target_file
+ return input_file
def GetTargetFilesZipForCustomImagesUpdates(input_file, custom_images):
@@ -828,8 +822,20 @@
return pattern.search(output) is not None
+def ExtractOrCopyTargetFiles(target_file):
+ if os.path.isdir(target_file):
+ return CopyTargetFilesDir(target_file)
+ else:
+ return ExtractTargetFiles(target_file)
+
+
def GenerateAbOtaPackage(target_file, output_file, source_file=None):
"""Generates an Android OTA package that has A/B update payload."""
+ # If input target_files are directories, create a copy so that we can modify
+ # them directly
+ target_file = ExtractOrCopyTargetFiles(target_file)
+ if source_file is not None:
+ source_file = ExtractOrCopyTargetFiles(source_file)
# Stage the output zip package for package signing.
if not OPTIONS.no_signing:
staging_file = common.MakeTempFile(suffix='.zip')
@@ -840,6 +846,7 @@
allowZip64=True)
if source_file is not None:
+ source_file = ExtractTargetFiles(source_file)
assert "ab_partitions" in OPTIONS.source_info_dict, \
"META/ab_partitions.txt is required for ab_update."
assert "ab_partitions" in OPTIONS.target_info_dict, \
@@ -909,6 +916,19 @@
logger.info(
"VABC Compression algorithm is set to 'none', disabling VABC xor")
OPTIONS.enable_vabc_xor = False
+
+ if OPTIONS.enable_vabc_xor:
+ api_level = -1
+ if source_info is not None:
+ api_level = source_info.vendor_api_level
+ if api_level == -1:
+ api_level = target_info.vendor_api_level
+
+ # XOR is only supported on T and higher.
+ if api_level < 33:
+ logger.error("VABC XOR not supported on this vendor, disabling")
+ OPTIONS.enable_vabc_xor = False
+
additional_args = []
# Prepare custom images.
@@ -923,16 +943,16 @@
elif OPTIONS.partial:
target_file = GetTargetFilesZipForPartialUpdates(target_file,
OPTIONS.partial)
- elif OPTIONS.vabc_compression_param:
+ if OPTIONS.vabc_compression_param:
target_file = GetTargetFilesZipForCustomVABCCompression(
target_file, OPTIONS.vabc_compression_param)
- elif OPTIONS.skip_postinstall:
+ if OPTIONS.skip_postinstall:
target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
# Target_file may have been modified, reparse ab_partitions
- with zipfile.ZipFile(target_file, allowZip64=True) as zfp:
- target_info.info_dict['ab_partitions'] = zfp.read(
- AB_PARTITIONS).decode().strip().split("\n")
+ target_info.info_dict['ab_partitions'] = common.ReadFromInputFile(target_file,
+ AB_PARTITIONS).strip().split("\n")
+ from check_target_files_vintf import CheckVintfIfTrebleEnabled
CheckVintfIfTrebleEnabled(target_file, target_info)
# Metadata to comply with Android OTA package format.
@@ -966,6 +986,9 @@
additional_args += ["--security_patch_level", security_patch_level]
+ if OPTIONS.max_threads:
+ additional_args += ["--max_threads", OPTIONS.max_threads]
+
additional_args += ["--enable_zucchini=" +
str(OPTIONS.enable_zucchini).lower()]
@@ -1029,15 +1052,13 @@
# If dm-verity is supported for the device, copy contents of care_map
# into A/B OTA package.
- target_zip = zipfile.ZipFile(target_file, "r", allowZip64=True)
if target_info.get("avb_enable") == "true":
- care_map_list = [x for x in ["care_map.pb", "care_map.txt"] if
- "META/" + x in target_zip.namelist()]
-
# Adds care_map if either the protobuf format or the plain text one exists.
- if care_map_list:
- care_map_name = care_map_list[0]
- care_map_data = target_zip.read("META/" + care_map_name)
+ for care_map_name in ["care_map.pb", "care_map.txt"]:
+ if not DoesInputFileContain(target_file, "META/" + care_map_name):
+ continue
+ care_map_data = common.ReadBytesFromInputFile(
+ target_file, "META/" + care_map_name)
# In order to support streaming, care_map needs to be packed as
# ZIP_STORED.
common.ZipWriteStr(output_zip, care_map_name, care_map_data,
@@ -1047,16 +1068,14 @@
# Add the source apex version for incremental ota updates, and write the
# result apex info to the ota package.
- ota_apex_info = ota_utils.ConstructOtaApexInfo(target_zip, source_file)
+ ota_apex_info = ota_utils.ConstructOtaApexInfo(target_file, source_file)
if ota_apex_info is not None:
common.ZipWriteStr(output_zip, "apex_info.pb", ota_apex_info,
compress_type=zipfile.ZIP_STORED)
- target_zip.close()
-
# We haven't written the metadata entry yet, which will be handled in
# FinalizeMetadata().
- output_zip.close()
+ common.ZipClose(output_zip)
FinalizeMetadata(metadata, staging_file, output_file,
package_key=OPTIONS.package_key)
@@ -1176,6 +1195,12 @@
OPTIONS.vabc_compression_param = a.lower()
elif o == "--security_patch_level":
OPTIONS.security_patch_level = a
+ elif o == "--max_threads":
+ if a.isdigit():
+ OPTIONS.max_threads = a
+ else:
+ raise ValueError("Cannot parse value %r for option %r - only "
+ "integers are allowed." % (a, o))
else:
return False
return True
@@ -1227,6 +1252,7 @@
"enable_lz4diff=",
"vabc_compression_param=",
"security_patch_level=",
+ "max_threads=",
], extra_option_handler=option_handler)
common.InitLogging()
@@ -1244,7 +1270,7 @@
if OPTIONS.extracted_input is not None:
OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.extracted_input)
else:
- OPTIONS.info_dict = ParseInfoDict(args[0])
+ OPTIONS.info_dict = common.LoadInfoDict(args[0])
if OPTIONS.wipe_user_data:
if not OPTIONS.vabc_downgrade:
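In the partial-update rewrite above, IsInPartialList() keeps a postinstall_config line only when its key ends in "_<partition>" for one of the requested partitions. A worked example of that filter (the config lines are illustrative):

    ab_partitions = ["system", "product"]

    def is_in_partial_list(line):
        key, sep, _ = line.partition("=")
        return bool(sep) and any(key.endswith("_" + p) for p in ab_partitions)

    config = [
        "RUN_POSTINSTALL_system=true",
        "POSTINSTALL_PATH_system=system/bin/otapreopt_script",
        "RUN_POSTINSTALL_vendor=true",
    ]
    assert [l for l in config if is_in_partial_list(l)] == config[:2]

Lines without an '=' are dropped outright, matching the idx < 0 early return in the code above.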
diff --git a/tools/releasetools/ota_utils.py b/tools/releasetools/ota_utils.py
index c419537..9067e78 100644
--- a/tools/releasetools/ota_utils.py
+++ b/tools/releasetools/ota_utils.py
@@ -22,7 +22,8 @@
import ota_metadata_pb2
import common
-from common import (ZipDelete, OPTIONS, MakeTempFile,
+import fnmatch
+from common import (ZipDelete, DoesInputFileContain, ReadBytesFromInputFile, OPTIONS, MakeTempFile,
ZipWriteStr, BuildInfo, LoadDictionaryFromFile,
SignFile, PARTITIONS_WITH_BUILD_PROP, PartitionBuildProps,
GetRamdiskFormat, ParseUpdateEngineConfig)
@@ -44,7 +45,8 @@
METADATA_NAME = 'META-INF/com/android/metadata'
METADATA_PROTO_NAME = 'META-INF/com/android/metadata.pb'
-UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*', 'RADIO/*']
+UNZIP_PATTERN = ['IMAGES/*', 'META/*', 'OTA/*',
+ 'RADIO/*', '*/build.prop', '*/default.prop', '*/build.default', "*/etc/vintf/*"]
SECURITY_PATCH_LEVEL_PROP_NAME = "ro.build.version.security_patch"
@@ -626,12 +628,10 @@
"""If applicable, add the source version to the apex info."""
def _ReadApexInfo(input_zip):
- if "META/apex_info.pb" not in input_zip.namelist():
+ if not DoesInputFileContain(input_zip, "META/apex_info.pb"):
logger.warning("target_file doesn't contain apex_info.pb %s", input_zip)
return None
-
- with input_zip.open("META/apex_info.pb", "r") as zfp:
- return zfp.read()
+ return ReadBytesFromInputFile(input_zip, "META/apex_info.pb")
target_apex_string = _ReadApexInfo(target_zip)
# Return early if the target apex info doesn't exist or is empty.
@@ -642,8 +642,7 @@
if not source_file:
return target_apex_string
- with zipfile.ZipFile(source_file, "r", allowZip64=True) as source_zip:
- source_apex_string = _ReadApexInfo(source_zip)
+ source_apex_string = _ReadApexInfo(source_file)
if not source_apex_string:
return target_apex_string
@@ -727,7 +726,7 @@
logger.info("target files %s is already extracted", path)
return path
extracted_dir = common.MakeTempDir("target_files")
- common.UnzipToDir(path, extracted_dir, UNZIP_PATTERN)
+ common.UnzipToDir(path, extracted_dir, UNZIP_PATTERN + [""])
return extracted_dir
@@ -839,6 +838,9 @@
major_version, minor_version = ParseUpdateEngineConfig(
os.path.join(target_dir, "META", "update_engine_config.txt"))
+ if source_file:
+ major_version, minor_version = ParseUpdateEngineConfig(
+ os.path.join(source_dir, "META", "update_engine_config.txt"))
if self.minor_version:
minor_version = self.minor_version
cmd.extend(["--major_version", str(major_version)])
@@ -1037,3 +1039,32 @@
assert metadata_total <= payload_size
return (payload_offset, metadata_total)
+
+
+def Fnmatch(filename, patterns):
+ return any(fnmatch.fnmatch(filename, pat) for pat in patterns)
+
+
+def CopyTargetFilesDir(input_dir):
+ output_dir = common.MakeTempDir("target_files")
+ IMAGES_DIR = ["IMAGES", "PREBUILT_IMAGES", "RADIO"]
+ for subdir in IMAGES_DIR:
+ if not os.path.exists(os.path.join(input_dir, subdir)):
+ continue
+ shutil.copytree(os.path.join(input_dir, subdir), os.path.join(
+ output_dir, subdir), dirs_exist_ok=True, copy_function=os.link)
+ shutil.copytree(os.path.join(input_dir, "META"), os.path.join(
+ output_dir, "META"), dirs_exist_ok=True)
+
+ for (dirpath, _, filenames) in os.walk(input_dir):
+ for filename in filenames:
+ path = os.path.join(dirpath, filename)
+ relative_path = path.removeprefix(input_dir).removeprefix("/")
+ if not Fnmatch(relative_path, UNZIP_PATTERN):
+ continue
+ if filename.endswith(".prop") or filename == "prop.default" or "/etc/vintf/" in relative_path:
+ target_path = os.path.join(
+ output_dir, relative_path)
+ os.makedirs(os.path.dirname(target_path), exist_ok=True)
+ shutil.copy(path, target_path)
+ return output_dir
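CopyTargetFilesDir() above keeps the staging copy cheap by hardlinking the large image directories (copy_function=os.link) and only byte-copying the small metadata files. The hardlink-copytree trick in isolation (function and directory names are illustrative):

    import os
    import shutil

    def link_tree(src, dst):
        # Hardlinks make this near-instant, but os.link fails across
        # filesystems, so src and dst must live on the same device.
        shutil.copytree(src, dst, dirs_exist_ok=True, copy_function=os.link)

    # e.g. link_tree("out/.../IMAGES", "/tmp/target_files/IMAGES")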
diff --git a/tools/releasetools/payload_signer.py b/tools/releasetools/payload_signer.py
index 4f342ac..9933aef 100644
--- a/tools/releasetools/payload_signer.py
+++ b/tools/releasetools/payload_signer.py
@@ -36,11 +36,16 @@
(OPTIONS.package_key) and calls openssl for the signing works.
"""
- def __init__(self, package_key=None, private_key_suffix=None, pw=None, payload_signer=None):
+ def __init__(self, package_key=None, private_key_suffix=None, pw=None, payload_signer=None,
+ payload_signer_args=None, payload_signer_maximum_signature_size=None):
if package_key is None:
package_key = OPTIONS.package_key
if private_key_suffix is None:
private_key_suffix = OPTIONS.private_key_suffix
+ if payload_signer_args is None:
+ payload_signer_args = OPTIONS.payload_signer_args
+ if payload_signer_maximum_signature_size is None:
+ payload_signer_maximum_signature_size = OPTIONS.payload_signer_maximum_signature_size
if payload_signer is None:
# Prepare the payload signing key.
@@ -59,10 +64,10 @@
signing_key)
else:
self.signer = payload_signer
- self.signer_args = OPTIONS.payload_signer_args
- if OPTIONS.payload_signer_maximum_signature_size:
+ self.signer_args = payload_signer_args
+ if payload_signer_maximum_signature_size:
self.maximum_signature_size = int(
- OPTIONS.payload_signer_maximum_signature_size)
+ payload_signer_maximum_signature_size)
else:
# The legacy config uses RSA2048 keys.
logger.warning("The maximum signature size for payload signer is not"
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 4a12e74..8291448 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -908,7 +908,7 @@
certs_zip = zipfile.ZipFile(temp_file, "w", allowZip64=True)
for k in keys:
common.ZipWrite(certs_zip, k)
- certs_zip.close()
+ common.ZipClose(certs_zip)
common.ZipWriteStr(output_zip, filename, temp_file.getvalue())
@@ -1545,8 +1545,8 @@
platform_api_level, codename_to_api_level_map,
compressed_extension)
- input_zip.close()
- output_zip.close()
+ common.ZipClose(input_zip)
+ common.ZipClose(output_zip)
if OPTIONS.vendor_partitions and OPTIONS.vendor_otatools:
BuildVendorPartitions(args[1])
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 8c9655ad0..2dfd8c7 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -222,17 +222,17 @@
info_dict = copy.deepcopy(self.TEST_INFO_FINGERPRINT_DICT)
build_info = common.BuildInfo(info_dict)
self.assertEqual(
- 'product-brand/product-name/product-device:version-release/build-id/'
- 'version-incremental:build-type/build-tags', build_info.fingerprint)
+ 'product-brand/product-name/product-device:version-release/build-id/'
+ 'version-incremental:build-type/build-tags', build_info.fingerprint)
build_props = info_dict['build.prop'].build_props
del build_props['ro.build.id']
build_props['ro.build.legacy.id'] = 'legacy-build-id'
build_info = common.BuildInfo(info_dict, use_legacy_id=True)
self.assertEqual(
- 'product-brand/product-name/product-device:version-release/'
- 'legacy-build-id/version-incremental:build-type/build-tags',
- build_info.fingerprint)
+ 'product-brand/product-name/product-device:version-release/'
+ 'legacy-build-id/version-incremental:build-type/build-tags',
+ build_info.fingerprint)
self.assertRaises(common.ExternalError, common.BuildInfo, info_dict, None,
False)
@@ -241,9 +241,9 @@
info_dict['vbmeta_digest'] = 'abcde12345'
build_info = common.BuildInfo(info_dict, use_legacy_id=False)
self.assertEqual(
- 'product-brand/product-name/product-device:version-release/'
- 'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
- build_info.fingerprint)
+ 'product-brand/product-name/product-device:version-release/'
+ 'legacy-build-id.abcde123/version-incremental:build-type/build-tags',
+ build_info.fingerprint)
def test___getitem__(self):
target_info = common.BuildInfo(self.TEST_INFO_DICT, None)
@@ -376,7 +376,7 @@
info_dict['build.prop'].build_props[
'ro.product.property_source_order'] = 'bad-source'
with self.assertRaisesRegexp(common.ExternalError,
- 'Invalid ro.product.property_source_order'):
+ 'Invalid ro.product.property_source_order'):
info = common.BuildInfo(info_dict, None)
info.GetBuildProp('ro.product.device')
@@ -452,14 +452,16 @@
test_file.write(bytes(data))
test_file.close()
- expected_stat = os.stat(test_file_name)
expected_mode = extra_zipwrite_args.get("perms", 0o644)
expected_compress_type = extra_zipwrite_args.get("compress_type",
zipfile.ZIP_STORED)
- time.sleep(5) # Make sure the atime/mtime will change measurably.
+ # Arbitrary timestamp, just to make sure common.ZipWrite() restores
+ # the timestamp after writing.
+ os.utime(test_file_name, (1234567, 1234567))
+ expected_stat = os.stat(test_file_name)
common.ZipWrite(zip_file, test_file_name, **extra_zipwrite_args)
- zip_file.close()
+ common.ZipClose(zip_file)
self._verify(zip_file, zip_file_name, arcname, sha1_hash.hexdigest(),
test_file_name, expected_stat, expected_mode,
@@ -480,8 +482,6 @@
try:
expected_compress_type = extra_args.get("compress_type",
zipfile.ZIP_STORED)
- time.sleep(5) # Make sure the atime/mtime will change measurably.
-
if not isinstance(zinfo_or_arcname, zipfile.ZipInfo):
arcname = zinfo_or_arcname
expected_mode = extra_args.get("perms", 0o644)
@@ -494,7 +494,7 @@
expected_mode = extra_args.get("perms", zinfo_perms)
common.ZipWriteStr(zip_file, zinfo_or_arcname, contents, **extra_args)
- zip_file.close()
+ common.ZipClose(zip_file)
self._verify(zip_file, zip_file_name, arcname, sha1(contents).hexdigest(),
expected_mode=expected_mode,
@@ -528,15 +528,17 @@
test_file.write(data)
test_file.close()
+ # Arbitrary timestamp, just to make sure common.ZipWrite() restores
+ # the timestamp after writing.
+ os.utime(test_file_name, (1234567, 1234567))
expected_stat = os.stat(test_file_name)
expected_mode = 0o644
expected_compress_type = extra_args.get("compress_type",
zipfile.ZIP_STORED)
- time.sleep(5) # Make sure the atime/mtime will change measurably.
common.ZipWrite(zip_file, test_file_name, **extra_args)
common.ZipWriteStr(zip_file, arcname_small, small, **extra_args)
- zip_file.close()
+ common.ZipClose(zip_file)
# Verify the contents written by ZipWrite().
self._verify(zip_file, zip_file_name, arcname_large,
@@ -551,6 +553,12 @@
os.remove(zip_file_name)
os.remove(test_file_name)
+ def _test_reset_ZIP64_LIMIT(self, func, *args):
+ default_limit = (1 << 31) - 1
+ self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
+ func(*args)
+ self.assertEqual(default_limit, zipfile.ZIP64_LIMIT)
+
def test_ZipWrite(self):
file_contents = os.urandom(1024)
self._test_ZipWrite(file_contents)
@@ -575,7 +583,7 @@
})
def test_ZipWrite_resets_ZIP64_LIMIT(self):
- self._test_ZipWrite("")
+ self._test_reset_ZIP64_LIMIT(self._test_ZipWrite, "")
def test_ZipWriteStr(self):
random_string = os.urandom(1024)
@@ -626,9 +634,9 @@
})
def test_ZipWriteStr_resets_ZIP64_LIMIT(self):
- self._test_ZipWriteStr('foo', b'')
+ self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, 'foo', b'')
zinfo = zipfile.ZipInfo(filename="foo")
- self._test_ZipWriteStr(zinfo, b'')
+ self._test_reset_ZIP64_LIMIT(self._test_ZipWriteStr, zinfo, b'')
def test_bug21309935(self):
zip_file = tempfile.NamedTemporaryFile(delete=False)
@@ -650,7 +658,7 @@
zinfo = zipfile.ZipInfo(filename="qux")
zinfo.external_attr = 0o700 << 16
common.ZipWriteStr(zip_file, zinfo, random_string, perms=0o400)
- zip_file.close()
+ common.ZipClose(zip_file)
self._verify(zip_file, zip_file_name, "foo",
sha1(random_string).hexdigest(),
@@ -677,7 +685,7 @@
common.ZipWrite(output_zip, entry_file.name, arcname='Test1')
common.ZipWrite(output_zip, entry_file.name, arcname='Test2')
common.ZipWrite(output_zip, entry_file.name, arcname='Test3')
- output_zip.close()
+ common.ZipClose(output_zip)
zip_file.close()
try:
@@ -725,8 +733,8 @@
common.ZipWrite(output_zip, entry_file.name, arcname='Foo3')
common.ZipWrite(output_zip, entry_file.name, arcname='Bar4')
common.ZipWrite(output_zip, entry_file.name, arcname='Dir5/Baz5')
- output_zip.close()
- output_zip.close()
+ common.ZipClose(output_zip)
+ common.ZipClose(output_zip)
return zip_file
@test_utils.SkipIfExternalToolsUnavailable()
@@ -813,9 +821,9 @@
)
APKCERTS_CERTMAP1 = {
- 'RecoveryLocalizer.apk': 'certs/devkey',
- 'Settings.apk': 'build/make/target/product/security/platform',
- 'TV.apk': 'PRESIGNED',
+ 'RecoveryLocalizer.apk' : 'certs/devkey',
+ 'Settings.apk' : 'build/make/target/product/security/platform',
+ 'TV.apk' : 'PRESIGNED',
}
APKCERTS_TXT2 = (
@@ -830,10 +838,10 @@
)
APKCERTS_CERTMAP2 = {
- 'Compressed1.apk': 'certs/compressed1',
- 'Compressed2a.apk': 'certs/compressed2',
- 'Compressed2b.apk': 'certs/compressed2',
- 'Compressed3.apk': 'certs/compressed3',
+ 'Compressed1.apk' : 'certs/compressed1',
+ 'Compressed2a.apk' : 'certs/compressed2',
+ 'Compressed2b.apk' : 'certs/compressed2',
+ 'Compressed3.apk' : 'certs/compressed3',
}
APKCERTS_TXT3 = (
@@ -842,7 +850,7 @@
)
APKCERTS_CERTMAP3 = {
- 'Compressed4.apk': 'certs/compressed4',
+ 'Compressed4.apk' : 'certs/compressed4',
}
# Test parsing with no optional fields, both optional fields, and only the
@@ -859,9 +867,9 @@
)
APKCERTS_CERTMAP4 = {
- 'RecoveryLocalizer.apk': 'certs/devkey',
- 'Settings.apk': 'build/make/target/product/security/platform',
- 'TV.apk': 'PRESIGNED',
+ 'RecoveryLocalizer.apk' : 'certs/devkey',
+ 'Settings.apk' : 'build/make/target/product/security/platform',
+ 'TV.apk' : 'PRESIGNED',
}
def setUp(self):
@@ -965,7 +973,7 @@
extracted_from_privkey = common.ExtractAvbPublicKey('avbtool', privkey)
extracted_from_pubkey = common.ExtractAvbPublicKey('avbtool', pubkey)
with open(extracted_from_privkey, 'rb') as privkey_fp, \
- open(extracted_from_pubkey, 'rb') as pubkey_fp:
+ open(extracted_from_pubkey, 'rb') as pubkey_fp:
self.assertEqual(privkey_fp.read(), pubkey_fp.read())
def test_ParseCertificate(self):
@@ -1229,8 +1237,7 @@
self.assertEqual(
'1-5 9-10',
sparse_image.file_map['//system/file1'].extra['text_str'])
- self.assertTrue(
- sparse_image.file_map['//system/file2'].extra['incomplete'])
+ self.assertTrue(sparse_image.file_map['//system/file2'].extra['incomplete'])
self.assertTrue(
sparse_image.file_map['/system/app/file3'].extra['incomplete'])
@@ -1338,7 +1345,7 @@
'recovery_api_version': 3,
'fstab_version': 2,
'system_root_image': 'true',
- 'no_recovery': 'true',
+ 'no_recovery' : 'true',
'recovery_as_boot': 'true',
}
@@ -1659,7 +1666,6 @@
self.assertRaises(common.ExternalError, common._GenerateGkiCertificate,
test_file.name, 'generic_kernel')
-
class InstallRecoveryScriptFormatTest(test_utils.ReleaseToolsTestCase):
"""Checks the format of install-recovery.sh.
@@ -1669,7 +1675,7 @@
def setUp(self):
self._tempdir = common.MakeTempDir()
# Create a fake dict that contains the fstab info for boot&recovery.
- self._info = {"fstab": {}}
+ self._info = {"fstab" : {}}
fake_fstab = [
"/dev/soc.0/by-name/boot /boot emmc defaults defaults",
"/dev/soc.0/by-name/recovery /recovery emmc defaults defaults"]
@@ -2016,11 +2022,11 @@
input_zip, 'odm', placeholder_values)
self.assertEqual({
- 'ro.odm.build.date.utc': '1578430045',
- 'ro.odm.build.fingerprint':
- 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
- 'ro.product.odm.device': 'coral',
- 'ro.product.odm.name': 'product1',
+ 'ro.odm.build.date.utc': '1578430045',
+ 'ro.odm.build.fingerprint':
+ 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+ 'ro.product.odm.device': 'coral',
+ 'ro.product.odm.name': 'product1',
}, partition_props.build_props)
with zipfile.ZipFile(input_file, 'r', allowZip64=True) as input_zip:
@@ -2203,8 +2209,8 @@
copied_props = copy.deepcopy(partition_props)
self.assertEqual({
- 'ro.odm.build.date.utc': '1578430045',
- 'ro.odm.build.fingerprint':
- 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
- 'ro.product.odm.device': 'coral',
+ 'ro.odm.build.date.utc': '1578430045',
+ 'ro.odm.build.fingerprint':
+ 'google/coral/coral:10/RP1A.200325.001/6337676:user/dev-keys',
+ 'ro.product.odm.device': 'coral',
}, copied_props.build_props)
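The test changes above replace time.sleep(5) with os.utime() and a fixed timestamp, which both removes real wall-clock delay from the suite and makes the "ZipWrite restores atime/mtime" assertion deterministic. The pattern in isolation (the temp file is illustrative):

    import os
    import tempfile

    with tempfile.NamedTemporaryFile(delete=False) as f:
        name = f.name
    os.utime(name, (1234567, 1234567))  # arbitrary but known timestamps
    st = os.stat(name)
    assert (st.st_atime, st.st_mtime) == (1234567, 1234567)
    os.remove(name)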
diff --git a/tools/releasetools/verity_utils.py b/tools/releasetools/verity_utils.py
index 755241d..dddb7f4 100644
--- a/tools/releasetools/verity_utils.py
+++ b/tools/releasetools/verity_utils.py
@@ -141,11 +141,7 @@
self.footer_type = footer_type
self.avbtool = avbtool
self.algorithm = algorithm
- self.key_path = key_path
- if key_path and not os.path.exists(key_path) and OPTIONS.search_path:
- new_key_path = os.path.join(OPTIONS.search_path, key_path)
- if os.path.exists(new_key_path):
- self.key_path = new_key_path
+ self.key_path = common.ResolveAVBSigningPathArgs(key_path)
self.salt = salt
self.signing_args = signing_args
diff --git a/tools/sbom/Android.bp b/tools/sbom/Android.bp
new file mode 100644
index 0000000..4837dde
--- /dev/null
+++ b/tools/sbom/Android.bp
@@ -0,0 +1,57 @@
+// Copyright (C) 2023 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package {
+ default_applicable_licenses: ["Android-Apache-2.0"],
+}
+
+python_binary_host {
+ name: "generate-sbom",
+ srcs: [
+ "generate-sbom.py",
+ ],
+ version: {
+ py3: {
+ embedded_launcher: true,
+ },
+ },
+ libs: [
+ "metadata_file_proto_py",
+ "libprotobuf-python",
+ "sbom_lib",
+ ],
+}
+
+python_library_host {
+ name: "sbom_lib",
+ srcs: [
+ "sbom_data.py",
+ "sbom_writers.py",
+ ],
+}
+
+python_test_host {
+ name: "sbom_writers_test",
+ main: "sbom_writers_test.py",
+ srcs: [
+ "sbom_writers_test.py",
+ ],
+ data: [
+ "testdata/*",
+ ],
+ libs: [
+ "sbom_lib",
+ ],
+ test_suites: ["general-tests"],
+}
diff --git a/tools/sbom/generate-sbom.py b/tools/sbom/generate-sbom.py
new file mode 100755
index 0000000..2415f7e
--- /dev/null
+++ b/tools/sbom/generate-sbom.py
@@ -0,0 +1,574 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Generate the SBOM of the current target product in SPDX format.
+Usage example:
+ generate-sbom.py --output_file out/target/product/vsoc_x86_64/sbom.spdx \
+ --metadata out/target/product/vsoc_x86_64/sbom-metadata.csv \
+ --build_version $(cat out/target/product/vsoc_x86_64/build_fingerprint.txt) \
+ --product_mfr=Google
+"""
+
+import argparse
+import csv
+import datetime
+import google.protobuf.text_format as text_format
+import hashlib
+import os
+import metadata_file_pb2
+import sbom_data
+import sbom_writers
+
+
+# Package type
+PKG_SOURCE = 'SOURCE'
+PKG_UPSTREAM = 'UPSTREAM'
+PKG_PREBUILT = 'PREBUILT'
+
+# Security tag
+NVD_CPE23 = 'NVD-CPE2.3:'
+
+# Report
+ISSUE_NO_METADATA = 'No metadata generated in Make for installed files:'
+ISSUE_NO_METADATA_FILE = 'No METADATA file found for installed file:'
+ISSUE_METADATA_FILE_INCOMPLETE = 'METADATA file incomplete:'
+ISSUE_UNKNOWN_SECURITY_TAG_TYPE = 'Unknown security tag type:'
+ISSUE_INSTALLED_FILE_NOT_EXIST = 'Non-existent installed files:'
+INFO_METADATA_FOUND_FOR_PACKAGE = 'METADATA file found for packages:'
+
+SOONG_PREBUILT_MODULE_TYPES = [
+ 'android_app_import',
+ 'android_library_import',
+ 'cc_prebuilt_binary',
+ 'cc_prebuilt_library',
+ 'cc_prebuilt_library_headers',
+ 'cc_prebuilt_library_shared',
+ 'cc_prebuilt_library_static',
+ 'cc_prebuilt_object',
+ 'dex_import',
+ 'java_import',
+ 'java_sdk_library_import',
+ 'java_system_modules_import',
+ 'libclang_rt_prebuilt_library_static',
+ 'libclang_rt_prebuilt_library_shared',
+ 'llvm_prebuilt_library_static',
+ 'ndk_prebuilt_object',
+ 'ndk_prebuilt_shared_stl',
+ 'ndk_prebuilt_static_stl',
+ 'prebuilt_apex',
+ 'prebuilt_bootclasspath_fragment',
+ 'prebuilt_dsp',
+ 'prebuilt_firmware',
+ 'prebuilt_kernel_modules',
+ 'prebuilt_rfsa',
+ 'prebuilt_root',
+ 'rust_prebuilt_dylib',
+ 'rust_prebuilt_library',
+ 'rust_prebuilt_rlib',
+ 'vndk_prebuilt_shared',
+]
+
+
+def get_args():
+ parser = argparse.ArgumentParser()
+ parser.add_argument('-v', '--verbose', action='store_true', default=False, help='Print more information.')
+ parser.add_argument('--output_file', required=True, help='The generated SBOM file in SPDX format.')
+ parser.add_argument('--metadata', required=True, help='The SBOM metadata file path.')
+ parser.add_argument('--build_version', required=True, help='The build version.')
+ parser.add_argument('--product_mfr', required=True, help='The product manufacturer.')
+ parser.add_argument('--json', action='store_true', default=False, help='Generate the SBOM file in SPDX JSON format.')
+ parser.add_argument('--unbundled_apk', action='store_true', default=False, help='Generate SBOM for unbundled APKs.')
+ parser.add_argument('--unbundled_apex', action='store_true', default=False, help='Generate SBOM for unbundled APEXes.')
+
+ return parser.parse_args()
+
+
+def log(*info):
+ if args.verbose:
+ for i in info:
+ print(i)
+
+
+def encode_for_spdxid(s):
+ """Simple encode for string values used in SPDXID which uses the charset of A-Za-Z0-9.-"""
+ result = ''
+ for c in s:
+ if c.isalnum() or c in '.-':
+ result += c
+ elif c in '_@/':
+ result += '-'
+ else:
+ result += '0x' + c.encode('utf-8').hex()
+
+ return result.lstrip('-')
+
+
+def new_package_id(package_name, type):
+ return f'SPDXRef-{type}-{encode_for_spdxid(package_name)}'
+
+
+def new_file_id(file_path):
+ return f'SPDXRef-{encode_for_spdxid(file_path)}'
+
+
+def checksum(file_path):
+ h = hashlib.sha1()
+ if os.path.islink(file_path):
+ h.update(os.readlink(file_path).encode('utf-8'))
+ else:
+ with open(file_path, 'rb') as f:
+ h.update(f.read())
+ return f'SHA1: {h.hexdigest()}'
+
+
+def is_soong_prebuilt_module(file_metadata):
+ return (file_metadata['soong_module_type'] and
+ file_metadata['soong_module_type'] in SOONG_PREBUILT_MODULE_TYPES)
+
+
+def is_source_package(file_metadata):
+ module_path = file_metadata['module_path']
+ return module_path.startswith('external/') and not is_prebuilt_package(file_metadata)
+
+
+def is_prebuilt_package(file_metadata):
+ module_path = file_metadata['module_path']
+ if module_path:
+ return (module_path.startswith('prebuilts/') or
+ is_soong_prebuilt_module(file_metadata) or
+ file_metadata['is_prebuilt_make_module'])
+
+ kernel_module_copy_files = file_metadata['kernel_module_copy_files']
+ if kernel_module_copy_files and not kernel_module_copy_files.startswith('ANDROID-GEN:'):
+ return True
+
+ return False
+
+
+def get_source_package_info(file_metadata, metadata_file_path):
+ """Return source package info exists in its METADATA file, currently including name, security tag
+ and external SBOM reference.
+
+ See go/android-spdx and go/android-sbom-gen for more details.
+ """
+ if not metadata_file_path:
+ return file_metadata['module_path'], []
+
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ external_refs = []
+ for tag in metadata_proto.third_party.security.tag:
+ if tag.lower().startswith((NVD_CPE23 + 'cpe:2.3:').lower()):
+ external_refs.append(
+ sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+ type=sbom_data.PackageExternalRefType.cpe23Type,
+ locator=tag.removeprefix(NVD_CPE23)))
+ elif tag.lower().startswith((NVD_CPE23 + 'cpe:/').lower()):
+ external_refs.append(
+ sbom_data.PackageExternalRef(category=sbom_data.PackageExternalRefCategory.SECURITY,
+ type=sbom_data.PackageExternalRefType.cpe22Type,
+ locator=tag.removeprefix(NVD_CPE23)))
+
+ if metadata_proto.name:
+ return metadata_proto.name, external_refs
+ else:
+ return os.path.basename(metadata_file_path), external_refs # return the directory name only as package name
+
+
+def get_prebuilt_package_name(file_metadata, metadata_file_path):
+ """Return name of a prebuilt package, which can be from the METADATA file, metadata file path,
+ module path or kernel module's source path if the installed file is a kernel module.
+
+ See go/android-spdx and go/android-sbom-gen for more details.
+ """
+ name = None
+ if metadata_file_path:
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ if metadata_proto.name:
+ name = metadata_proto.name
+ else:
+ name = metadata_file_path
+ elif file_metadata['module_path']:
+ name = file_metadata['module_path']
+ elif file_metadata['kernel_module_copy_files']:
+ src_path = file_metadata['kernel_module_copy_files'].split(':')[0]
+ name = os.path.dirname(src_path)
+
+ return name.removeprefix('prebuilts/').replace('/', '-')
+
+
+def get_metadata_file_path(file_metadata):
+ """Search for METADATA file of a package and return its path."""
+ metadata_path = ''
+ if file_metadata['module_path']:
+ metadata_path = file_metadata['module_path']
+ elif file_metadata['kernel_module_copy_files']:
+ metadata_path = os.path.dirname(file_metadata['kernel_module_copy_files'].split(':')[0])
+
+ while metadata_path and not os.path.exists(metadata_path + '/METADATA'):
+ metadata_path = os.path.dirname(metadata_path)
+
+ return metadata_path
+
+
+def get_package_version(metadata_file_path):
+ """Return a package's version in its METADATA file."""
+ if not metadata_file_path:
+ return None
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ return metadata_proto.third_party.version
+
+
+def get_package_homepage(metadata_file_path):
+ """Return a package's homepage URL in its METADATA file."""
+ if not metadata_file_path:
+ return None
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ if metadata_proto.third_party.homepage:
+ return metadata_proto.third_party.homepage
+ for url in metadata_proto.third_party.url:
+ if url.type == metadata_file_pb2.URL.Type.HOMEPAGE:
+ return url.value
+
+ return None
+
+
+def get_package_download_location(metadata_file_path):
+ """Return a package's code repository URL in its METADATA file."""
+ if not metadata_file_path:
+ return None
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ if metadata_proto.third_party.url:
+ urls = sorted(metadata_proto.third_party.url, key=lambda url: url.type)
+ if urls[0].type != metadata_file_pb2.URL.Type.HOMEPAGE:
+ return urls[0].value
+ elif len(urls) > 1:
+ return urls[1].value
+
+ return None
+
+
+def get_sbom_fragments(installed_file_metadata, metadata_file_path):
+ """Return SPDX fragment of source/prebuilt packages, which usually contains a SOURCE/PREBUILT
+ package, a UPSTREAM package and an external SBOM document reference if sbom_ref defined in its
+ METADATA file.
+
+ See go/android-spdx and go/android-sbom-gen for more details.
+ """
+ external_doc_ref = None
+ packages = []
+ relationships = []
+
+ # Info from METADATA file
+ homepage = get_package_homepage(metadata_file_path)
+ version = get_package_version(metadata_file_path)
+ download_location = get_package_download_location(metadata_file_path)
+
+ if is_source_package(installed_file_metadata):
+ # Source fork packages
+ name, external_refs = get_source_package_info(installed_file_metadata, metadata_file_path)
+ source_package_id = new_package_id(name, PKG_SOURCE)
+ source_package = sbom_data.Package(id=source_package_id, name=name, version=args.build_version,
+ download_location=sbom_data.VALUE_NONE,
+ supplier='Organization: ' + args.product_mfr,
+ external_refs=external_refs)
+
+ upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+ upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
+ supplier=('Organization: ' + homepage) if homepage else sbom_data.VALUE_NOASSERTION,
+ download_location=download_location)
+ packages += [source_package, upstream_package]
+ relationships.append(sbom_data.Relationship(id1=source_package_id,
+ relationship=sbom_data.RelationshipType.VARIANT_OF,
+ id2=upstream_package_id))
+ elif is_prebuilt_package(installed_file_metadata):
+ # Prebuilt fork packages
+ name = get_prebuilt_package_name(installed_file_metadata, metadata_file_path)
+ prebuilt_package_id = new_package_id(name, PKG_PREBUILT)
+ prebuilt_package = sbom_data.Package(id=prebuilt_package_id,
+ name=name,
+ download_location=sbom_data.VALUE_NONE,
+ version=version if version else args.build_version,
+ supplier='Organization: ' + args.product_mfr)
+
+ upstream_package_id = new_package_id(name, PKG_UPSTREAM)
+ upstream_package = sbom_data.Package(id=upstream_package_id, name=name, version=version,
+ supplier=('Organization: ' + homepage) if homepage else sbom_data.VALUE_NOASSERTION,
+ download_location=download_location)
+ packages += [prebuilt_package, upstream_package]
+ relationships.append(sbom_data.Relationship(id1=prebuilt_package_id,
+ relationship=sbom_data.RelationshipType.VARIANT_OF,
+ id2=upstream_package_id))
+
+ if metadata_file_path:
+ metadata_proto = metadata_file_protos[metadata_file_path]
+ if metadata_proto.third_party.WhichOneof('sbom') == 'sbom_ref':
+ sbom_url = metadata_proto.third_party.sbom_ref.url
+ sbom_checksum = metadata_proto.third_party.sbom_ref.checksum
+ upstream_element_id = metadata_proto.third_party.sbom_ref.element_id
+ if sbom_url and sbom_checksum and upstream_element_id:
+ doc_ref_id = f'DocumentRef-{PKG_UPSTREAM}-{encode_for_spdxid(name)}'
+ external_doc_ref = sbom_data.DocumentExternalReference(id=doc_ref_id,
+ uri=sbom_url,
+ checksum=sbom_checksum)
+ relationships.append(
+ sbom_data.Relationship(id1=upstream_package_id,
+ relationship=sbom_data.RelationshipType.VARIANT_OF,
+ id2=doc_ref_id + ':' + upstream_element_id))
+
+ return external_doc_ref, packages, relationships
+
+
+def generate_package_verification_code(files):
+ checksums = [file.checksum for file in files]
+ checksums.sort()
+ h = hashlib.sha1()
+ h.update(''.join(checksums).encode(encoding='utf-8'))
+ return h.hexdigest()
+
+
+def save_report(report_file_path, report):
+ with open(report_file_path, 'w', encoding='utf-8') as report_file:
+ for type, issues in report.items():
+ report_file.write(type + '\n')
+ for issue in issues:
+ report_file.write('\t' + issue + '\n')
+ report_file.write('\n')
+
+
+# Validate the metadata generated by Make for installed files and report if there is no metadata.
+def installed_file_has_metadata(installed_file_metadata, report):
+ installed_file = installed_file_metadata['installed_file']
+ module_path = installed_file_metadata['module_path']
+ product_copy_files = installed_file_metadata['product_copy_files']
+ kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+ is_platform_generated = installed_file_metadata['is_platform_generated']
+
+ if (not module_path and
+ not product_copy_files and
+ not kernel_module_copy_files and
+ not is_platform_generated and
+ not installed_file.endswith('.fsv_meta')):
+ report[ISSUE_NO_METADATA].append(installed_file)
+ return False
+
+ return True
+
+
+def report_metadata_file(metadata_file_path, installed_file_metadata, report):
+ if metadata_file_path:
+ report[INFO_METADATA_FOUND_FOR_PACKAGE].append(
+ 'installed_file: {}, module_path: {}, METADATA file: {}'.format(
+ installed_file_metadata['installed_file'],
+ installed_file_metadata['module_path'],
+ metadata_file_path + '/METADATA'))
+
+ package_metadata = metadata_file_pb2.Metadata()
+ with open(metadata_file_path + '/METADATA', 'rt') as f:
+ text_format.Parse(f.read(), package_metadata)
+
+ if metadata_file_path not in metadata_file_protos:
+ metadata_file_protos[metadata_file_path] = package_metadata
+ if not package_metadata.name:
+ report[ISSUE_METADATA_FILE_INCOMPLETE].append(f'{metadata_file_path}/METADATA does not have "name"')
+
+ if not package_metadata.third_party.version:
+ report[ISSUE_METADATA_FILE_INCOMPLETE].append(
+ f'{metadata_file_path}/METADATA does not have "third_party.version"')
+
+ for tag in package_metadata.third_party.security.tag:
+ if not tag.startswith(NVD_CPE23):
+ report[ISSUE_UNKNOWN_SECURITY_TAG_TYPE].append(
+ f'Unknown security tag type: {tag} in {metadata_file_path}/METADATA')
+ else:
+ report[ISSUE_NO_METADATA_FILE].append(
+ "installed_file: {}, module_path: {}".format(
+ installed_file_metadata['installed_file'], installed_file_metadata['module_path']))
+
+
+def generate_sbom_for_unbundled_apk():
+ with open(args.metadata, newline='') as sbom_metadata_file:
+ reader = csv.DictReader(sbom_metadata_file)
+ doc = sbom_data.Document(name=args.build_version,
+ namespace=f'https://www.google.com/sbom/spdx/android/{args.build_version}',
+ creators=['Organization: ' + args.product_mfr])
+ for installed_file_metadata in reader:
+ installed_file = installed_file_metadata['installed_file']
+ if args.output_file != installed_file_metadata['build_output_path'] + '.spdx.json':
+ continue
+
+ module_path = installed_file_metadata['module_path']
+ package_id = new_package_id(module_path, PKG_PREBUILT)
+ package = sbom_data.Package(id=package_id,
+ name=module_path,
+ version=args.build_version,
+ supplier='Organization: ' + args.product_mfr)
+ file_id = new_file_id(installed_file)
+ file = sbom_data.File(id=file_id,
+ name=installed_file,
+ checksum=checksum(installed_file_metadata['build_output_path']))
+ relationship = sbom_data.Relationship(id1=file_id,
+ relationship=sbom_data.RelationshipType.GENERATED_FROM,
+ id2=package_id)
+ doc.add_package(package)
+ doc.files.append(file)
+ doc.describes = file_id
+ doc.add_relationship(relationship)
+ doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+ break
+
+ with open(args.output_file, 'w', encoding='utf-8') as file:
+ sbom_writers.JSONWriter.write(doc, file)
+ fragment_file = args.output_file.removesuffix('.spdx.json') + '-fragment.spdx'
+ with open(fragment_file, 'w', encoding='utf-8') as file:
+ sbom_writers.TagValueWriter.write(doc, file, fragment=True)
+
+
+def main():
+ global args
+ args = get_args()
+ log('Args:', vars(args))
+
+ if args.unbundled_apk:
+ generate_sbom_for_unbundled_apk()
+ return
+
+ global metadata_file_protos
+ metadata_file_protos = {}
+
+ product_package = sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
+ name=sbom_data.PACKAGE_NAME_PRODUCT,
+ download_location=sbom_data.VALUE_NONE,
+ version=args.build_version,
+ supplier='Organization: ' + args.product_mfr,
+ files_analyzed=True)
+
+ doc = sbom_data.Document(name=args.build_version,
+ namespace=f'https://www.google.com/sbom/spdx/android/{args.build_version}',
+ creators=['Organization: ' + args.product_mfr])
+ if not args.unbundled_apex:
+ doc.packages.append(product_package)
+
+ doc.packages.append(sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
+ name=sbom_data.PACKAGE_NAME_PLATFORM,
+ download_location=sbom_data.VALUE_NONE,
+ version=args.build_version,
+ supplier='Organization: ' + args.product_mfr))
+
+ # Report on some issues and information
+ report = {
+ ISSUE_NO_METADATA: [],
+ ISSUE_NO_METADATA_FILE: [],
+ ISSUE_METADATA_FILE_INCOMPLETE: [],
+ ISSUE_UNKNOWN_SECURITY_TAG_TYPE: [],
+ ISSUE_INSTALLED_FILE_NOT_EXIST: [],
+ INFO_METADATA_FOUND_FOR_PACKAGE: [],
+ }
+
+ # Scan the metadata in CSV file and create the corresponding package and file records in SPDX
+ with open(args.metadata, newline='') as sbom_metadata_file:
+ reader = csv.DictReader(sbom_metadata_file)
+ for installed_file_metadata in reader:
+ installed_file = installed_file_metadata['installed_file']
+ module_path = installed_file_metadata['module_path']
+ product_copy_files = installed_file_metadata['product_copy_files']
+ kernel_module_copy_files = installed_file_metadata['kernel_module_copy_files']
+ build_output_path = installed_file_metadata['build_output_path']
+
+ if not installed_file_has_metadata(installed_file_metadata, report):
+ continue
+ if not (os.path.islink(build_output_path) or os.path.isfile(build_output_path)):
+ report[ISSUE_INSTALLED_FILE_NOT_EXIST].append(installed_file)
+ continue
+
+ file_id = new_file_id(installed_file)
+ doc.files.append(
+ sbom_data.File(id=file_id, name=installed_file, checksum=checksum(build_output_path)))
+ if not args.unbundled_apex:
+ product_package.file_ids.append(file_id)
+ elif len(doc.files) > 1:
+ doc.add_relationship(sbom_data.Relationship(doc.files[0].id, sbom_data.RelationshipType.CONTAINS, file_id))
+
+ if is_source_package(installed_file_metadata) or is_prebuilt_package(installed_file_metadata):
+ metadata_file_path = get_metadata_file_path(installed_file_metadata)
+ report_metadata_file(metadata_file_path, installed_file_metadata, report)
+
+ # File from source fork packages or prebuilt fork packages
+ external_doc_ref, pkgs, rels = get_sbom_fragments(installed_file_metadata, metadata_file_path)
+ if len(pkgs) > 0:
+ if external_doc_ref:
+ doc.add_external_ref(external_doc_ref)
+ for p in pkgs:
+ doc.add_package(p)
+ for rel in rels:
+ doc.add_relationship(rel)
+ fork_package_id = pkgs[0].id # The first package should be the source/prebuilt fork package
+ doc.add_relationship(sbom_data.Relationship(id1=file_id,
+ relationship=sbom_data.RelationshipType.GENERATED_FROM,
+ id2=fork_package_id))
+ elif module_path or installed_file_metadata['is_platform_generated']:
+ # File from PLATFORM package
+ doc.add_relationship(sbom_data.Relationship(id1=file_id,
+ relationship=sbom_data.RelationshipType.GENERATED_FROM,
+ id2=sbom_data.SPDXID_PLATFORM))
+ elif product_copy_files:
+ # Format of product_copy_files: <source path>:<dest path>
+ src_path = product_copy_files.split(':')[0]
+ # So far product_copy_files are copied from the system, kernel, hardware, frameworks and
+ # device directories, so process them as files from the PLATFORM package
+ doc.add_relationship(sbom_data.Relationship(id1=file_id,
+ relationship=sbom_data.RelationshipType.GENERATED_FROM,
+ id2=sbom_data.SPDXID_PLATFORM))
+ elif installed_file.endswith('.fsv_meta'):
+ # See build/make/core/Makefile:2988
+ doc.add_relationship(sbom_data.Relationship(id1=file_id,
+ relationship=sbom_data.RelationshipType.GENERATED_FROM,
+ id2=sbom_data.SPDXID_PLATFORM))
+ elif kernel_module_copy_files.startswith('ANDROID-GEN'):
+ # For the four files generated for _dlkm, _ramdisk partitions
+ # See build/make/core/Makefile:323
+ doc.add_relationship(sbom_data.Relationship(id1=file_id,
+ relationship=sbom_data.RelationshipType.GENERATED_FROM,
+ id2=sbom_data.SPDXID_PLATFORM))
+
+ if not args.unbundled_apex:
+ product_package.verification_code = generate_package_verification_code(doc.files)
+
+ if args.unbundled_apex:
+ doc.describes = doc.files[0].id
+
+ # Save SBOM records to output file
+ doc.created = datetime.datetime.now(tz=datetime.timezone.utc).strftime('%Y-%m-%dT%H:%M:%SZ')
+ prefix = args.output_file
+ if prefix.endswith('.spdx'):
+ prefix = prefix.removesuffix('.spdx')
+ elif prefix.endswith('.spdx.json'):
+ prefix = prefix.removesuffix('.spdx.json')
+
+ output_file = prefix + '.spdx'
+ if args.unbundled_apex:
+ output_file = prefix + '-fragment.spdx'
+ with open(output_file, 'w', encoding="utf-8") as file:
+ sbom_writers.TagValueWriter.write(doc, file, fragment=args.unbundled_apex)
+ if args.json:
+ with open(prefix + '.spdx.json', 'w', encoding="utf-8") as file:
+ sbom_writers.JSONWriter.write(doc, file)
+
+ save_report(prefix + '-gen-report.txt', report)
+
+
+if __name__ == '__main__':
+ main()
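
The SPDXID encoding above is easiest to see with concrete inputs. Since generate-sbom.py's hyphenated name keeps it from being imported directly, the rules are restated in this standalone sketch purely for illustration:

def _encode_for_spdxid(s):
    # Alphanumerics and '.-' pass through, '_@/' map to '-', everything else
    # becomes a '0x<hex>' escape; any leading '-' is stripped at the end.
    result = ''.join(
        c if (c.isalnum() or c in '.-')
        else '-' if c in '_@/'
        else '0x' + c.encode('utf-8').hex()
        for c in s)
    return result.lstrip('-')

assert _encode_for_spdxid('external/zlib') == 'external-zlib'  # '/' maps to '-'
assert _encode_for_spdxid('libc++.so') == 'libc0x2b0x2b.so'    # '+' is hex-escaped
assert _encode_for_spdxid('_apex') == 'apex'                   # leading '-' stripped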
diff --git a/tools/sbom/sbom_data.py b/tools/sbom/sbom_data.py
new file mode 100644
index 0000000..14c4eb2
--- /dev/null
+++ b/tools/sbom/sbom_data.py
@@ -0,0 +1,124 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Define data classes that model SBOMs defined by SPDX. The data classes can be
+written out to different SPDX formats (tag-value, JSON, etc.) with the corresponding
+writer utilities.
+
+Refer to the SPDX 2.3 spec: https://spdx.github.io/spdx-spec/v2.3/ and go/android-spdx for details
+of the fields in each data class.
+"""
+
+from dataclasses import dataclass, field
+from typing import List
+
+SPDXID_DOC = 'SPDXRef-DOCUMENT'
+SPDXID_PRODUCT = 'SPDXRef-PRODUCT'
+SPDXID_PLATFORM = 'SPDXRef-PLATFORM'
+
+PACKAGE_NAME_PRODUCT = 'PRODUCT'
+PACKAGE_NAME_PLATFORM = 'PLATFORM'
+
+VALUE_NOASSERTION = 'NOASSERTION'
+VALUE_NONE = 'NONE'
+
+
+class PackageExternalRefCategory:
+ SECURITY = 'SECURITY'
+ PACKAGE_MANAGER = 'PACKAGE-MANAGER'
+ PERSISTENT_ID = 'PERSISTENT-ID'
+ OTHER = 'OTHER'
+
+
+class PackageExternalRefType:
+ cpe22Type = 'cpe22Type'
+ cpe23Type = 'cpe23Type'
+
+
+@dataclass
+class PackageExternalRef:
+ category: PackageExternalRefCategory
+ type: PackageExternalRefType
+ locator: str
+
+
+@dataclass
+class Package:
+ name: str
+ id: str
+ version: str = None
+ supplier: str = None
+ download_location: str = None
+ files_analyzed: bool = False
+ verification_code: str = None
+ file_ids: List[str] = field(default_factory=list)
+ external_refs: List[PackageExternalRef] = field(default_factory=list)
+
+
+@dataclass
+class File:
+ id: str
+ name: str
+ checksum: str
+
+
+class RelationshipType:
+ DESCRIBES = 'DESCRIBES'
+ VARIANT_OF = 'VARIANT_OF'
+ GENERATED_FROM = 'GENERATED_FROM'
+ CONTAINS = 'CONTAINS'
+
+
+@dataclass
+class Relationship:
+ id1: str
+ relationship: RelationshipType
+ id2: str
+
+
+@dataclass
+class DocumentExternalReference:
+ id: str
+ uri: str
+ checksum: str
+
+
+@dataclass
+class Document:
+ name: str
+ namespace: str
+ id: str = SPDXID_DOC
+ describes: str = SPDXID_PRODUCT
+ creators: List[str] = field(default_factory=list)
+ created: str = None
+ external_refs: List[DocumentExternalReference] = field(default_factory=list)
+ packages: List[Package] = field(default_factory=list)
+ files: List[File] = field(default_factory=list)
+ relationships: List[Relationship] = field(default_factory=list)
+
+ def add_external_ref(self, external_ref):
+ if not any(external_ref.uri == ref.uri for ref in self.external_refs):
+ self.external_refs.append(external_ref)
+
+ def add_package(self, package):
+ if not any(package.id == p.id for p in self.packages):
+ self.packages.append(package)
+
+ def add_relationship(self, rel):
+ if not any(rel.id1 == r.id1 and rel.id2 == r.id2 and rel.relationship == r.relationship
+ for r in self.relationships):
+ self.relationships.append(rel)
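
A minimal usage sketch of the data model (the namespace URL and ids are placeholders): the add_* helpers de-duplicate by SPDXID and by relationship triple, so callers can add records unconditionally.

import sbom_data

doc = sbom_data.Document(name='demo', namespace='https://example.com/spdx/demo')
pkg = sbom_data.Package(id='SPDXRef-PREBUILT-foo', name='foo', version='1.0')
doc.add_package(pkg)
doc.add_package(pkg)  # no-op: a package with this id is already present
assert len(doc.packages) == 1

rel = sbom_data.Relationship(id1='SPDXRef-file1',
                             relationship=sbom_data.RelationshipType.GENERATED_FROM,
                             id2='SPDXRef-PREBUILT-foo')
doc.add_relationship(rel)
doc.add_relationship(rel)  # likewise ignored
assert len(doc.relationships) == 1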
diff --git a/tools/sbom/sbom_writers.py b/tools/sbom/sbom_writers.py
new file mode 100644
index 0000000..85dee9d
--- /dev/null
+++ b/tools/sbom/sbom_writers.py
@@ -0,0 +1,369 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+Serialize objects defined in package sbom_data to SPDX formats: tag-value, JSON.
+"""
+
+import json
+import sbom_data
+
+SPDX_VER = 'SPDX-2.3'
+DATA_LIC = 'CC0-1.0'
+
+
+class Tags:
+ # Common
+ SPDXID = 'SPDXID'
+ SPDX_VERSION = 'SPDXVersion'
+ DATA_LICENSE = 'DataLicense'
+ DOCUMENT_NAME = 'DocumentName'
+ DOCUMENT_NAMESPACE = 'DocumentNamespace'
+ CREATED = 'Created'
+ CREATOR = 'Creator'
+ EXTERNAL_DOCUMENT_REF = 'ExternalDocumentRef'
+
+ # Package
+ PACKAGE_NAME = 'PackageName'
+ PACKAGE_DOWNLOAD_LOCATION = 'PackageDownloadLocation'
+ PACKAGE_VERSION = 'PackageVersion'
+ PACKAGE_SUPPLIER = 'PackageSupplier'
+ FILES_ANALYZED = 'FilesAnalyzed'
+ PACKAGE_VERIFICATION_CODE = 'PackageVerificationCode'
+ PACKAGE_EXTERNAL_REF = 'ExternalRef'
+ # Package license
+ PACKAGE_LICENSE_CONCLUDED = 'PackageLicenseConcluded'
+ PACKAGE_LICENSE_INFO_FROM_FILES = 'PackageLicenseInfoFromFiles'
+ PACKAGE_LICENSE_DECLARED = 'PackageLicenseDeclared'
+ PACKAGE_LICENSE_COMMENTS = 'PackageLicenseComments'
+
+ # File
+ FILE_NAME = 'FileName'
+ FILE_CHECKSUM = 'FileChecksum'
+ # File license
+ FILE_LICENSE_CONCLUDED = 'LicenseConcluded'
+ FILE_LICENSE_INFO_IN_FILE = 'LicenseInfoInFile'
+ FILE_LICENSE_COMMENTS = 'LicenseComments'
+ FILE_COPYRIGHT_TEXT = 'FileCopyrightText'
+ FILE_NOTICE = 'FileNotice'
+ FILE_ATTRIBUTION_TEXT = 'FileAttributionText'
+
+ # Relationship
+ RELATIONSHIP = 'Relationship'
+
+
+class TagValueWriter:
+ @staticmethod
+ def marshal_doc_headers(sbom_doc):
+ headers = [
+ f'{Tags.SPDX_VERSION}: {SPDX_VER}',
+ f'{Tags.DATA_LICENSE}: {DATA_LIC}',
+ f'{Tags.SPDXID}: {sbom_doc.id}',
+ f'{Tags.DOCUMENT_NAME}: {sbom_doc.name}',
+ f'{Tags.DOCUMENT_NAMESPACE}: {sbom_doc.namespace}',
+ ]
+ for creator in sbom_doc.creators:
+ headers.append(f'{Tags.CREATOR}: {creator}')
+ headers.append(f'{Tags.CREATED}: {sbom_doc.created}')
+ for doc_ref in sbom_doc.external_refs:
+ headers.append(
+ f'{Tags.EXTERNAL_DOCUMENT_REF}: {doc_ref.id} {doc_ref.uri} {doc_ref.checksum}')
+ headers.append('')
+ return headers
+
+ @staticmethod
+ def marshal_package(package):
+ download_location = sbom_data.VALUE_NOASSERTION
+ if package.download_location:
+ download_location = package.download_location
+ tagvalues = [
+ f'{Tags.PACKAGE_NAME}: {package.name}',
+ f'{Tags.SPDXID}: {package.id}',
+ f'{Tags.PACKAGE_DOWNLOAD_LOCATION}: {download_location}',
+ f'{Tags.FILES_ANALYZED}: {str(package.files_analyzed).lower()}',
+ ]
+ if package.version:
+ tagvalues.append(f'{Tags.PACKAGE_VERSION}: {package.version}')
+ if package.supplier:
+ tagvalues.append(f'{Tags.PACKAGE_SUPPLIER}: {package.supplier}')
+ if package.verification_code:
+ tagvalues.append(f'{Tags.PACKAGE_VERIFICATION_CODE}: {package.verification_code}')
+ if package.external_refs:
+ for external_ref in package.external_refs:
+ tagvalues.append(
+ f'{Tags.PACKAGE_EXTERNAL_REF}: {external_ref.category} {external_ref.type} {external_ref.locator}')
+
+ tagvalues.append('')
+ return tagvalues
+
+ @staticmethod
+ def marshal_described_element(sbom_doc, fragment):
+ if not sbom_doc.describes:
+ return None
+
+ product_package = [p for p in sbom_doc.packages if p.id == sbom_doc.describes]
+ if product_package:
+ tagvalues = TagValueWriter.marshal_package(product_package[0])
+ if not fragment:
+ tagvalues.append(
+ f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
+
+ tagvalues.append('')
+ return tagvalues
+
+ file = [f for f in sbom_doc.files if f.id == sbom_doc.describes]
+ if file:
+ tagvalues = TagValueWriter.marshal_file(file[0])
+ if not fragment:
+ tagvalues.append(
+ f'{Tags.RELATIONSHIP}: {sbom_doc.id} {sbom_data.RelationshipType.DESCRIBES} {sbom_doc.describes}')
+
+ return tagvalues
+
+ return None
+
+ @staticmethod
+ def marshal_packages(sbom_doc):
+ tagvalues = []
+ marshaled_relationships = []
+ i = 0
+ packages = sbom_doc.packages
+ while i < len(packages):
+ if packages[i].id == sbom_doc.describes:
+ i += 1
+ continue
+
+ if i + 1 < len(packages) \
+ and packages[i].id.startswith('SPDXRef-SOURCE-') \
+ and packages[i + 1].id.startswith('SPDXRef-UPSTREAM-'):
+ tagvalues += TagValueWriter.marshal_package(packages[i])
+ tagvalues += TagValueWriter.marshal_package(packages[i + 1])
+ rel = next((r for r in sbom_doc.relationships if
+ r.id1 == packages[i].id and
+ r.id2 == packages[i + 1].id and
+ r.relationship == sbom_data.RelationshipType.VARIANT_OF), None)
+ if rel:
+ marshaled_relationships.append(rel)
+ tagvalues.append(TagValueWriter.marshal_relationship(rel))
+ tagvalues.append('')
+
+ i += 2
+ else:
+ tagvalues += TagValueWriter.marshal_package(packages[i])
+ i += 1
+
+ return tagvalues, marshaled_relationships
+
+ @staticmethod
+ def marshal_file(file):
+ tagvalues = [
+ f'{Tags.FILE_NAME}: {file.name}',
+ f'{Tags.SPDXID}: {file.id}',
+ f'{Tags.FILE_CHECKSUM}: {file.checksum}',
+ '',
+ ]
+
+ return tagvalues
+
+ @staticmethod
+ def marshal_files(sbom_doc):
+ tagvalues = []
+ for file in sbom_doc.files:
+ if file.id == sbom_doc.describes:
+ continue
+ tagvalues += TagValueWriter.marshal_file(file)
+ return tagvalues
+
+ @staticmethod
+ def marshal_relationship(rel):
+ return f'{Tags.RELATIONSHIP}: {rel.id1} {rel.relationship} {rel.id2}'
+
+ @staticmethod
+ def marshal_relationships(sbom_doc, marshaled_rels):
+ tagvalues = []
+ sorted_rels = sorted(sbom_doc.relationships, key=lambda r: r.id2 + r.id1)
+ for rel in sorted_rels:
+ if any(r.id1 == rel.id1 and r.id2 == rel.id2 and r.relationship == rel.relationship
+ for r in marshaled_rels):
+ continue
+ tagvalues.append(TagValueWriter.marshal_relationship(rel))
+ tagvalues.append('')
+ return tagvalues
+
+ @staticmethod
+ def write(sbom_doc, file, fragment=False):
+ content = []
+ if not fragment:
+ content += TagValueWriter.marshal_doc_headers(sbom_doc)
+ described_element = TagValueWriter.marshal_described_element(sbom_doc, fragment)
+ if described_element:
+ content += described_element
+ content += TagValueWriter.marshal_files(sbom_doc)
+ tagvalues, marshaled_relationships = TagValueWriter.marshal_packages(sbom_doc)
+ content += tagvalues
+ content += TagValueWriter.marshal_relationships(sbom_doc, marshaled_relationships)
+ file.write('\n'.join(content))
+
+
+class PropNames:
+ # Common
+ SPDXID = 'SPDXID'
+ SPDX_VERSION = 'spdxVersion'
+ DATA_LICENSE = 'dataLicense'
+ NAME = 'name'
+ DOCUMENT_NAMESPACE = 'documentNamespace'
+ CREATION_INFO = 'creationInfo'
+ CREATORS = 'creators'
+ CREATED = 'created'
+ EXTERNAL_DOCUMENT_REF = 'externalDocumentRefs'
+ DOCUMENT_DESCRIBES = 'documentDescribes'
+ EXTERNAL_DOCUMENT_ID = 'externalDocumentId'
+ EXTERNAL_DOCUMENT_URI = 'spdxDocument'
+ EXTERNAL_DOCUMENT_CHECKSUM = 'checksum'
+ ALGORITHM = 'algorithm'
+ CHECKSUM_VALUE = 'checksumValue'
+
+ # Package
+ PACKAGES = 'packages'
+ PACKAGE_DOWNLOAD_LOCATION = 'downloadLocation'
+ PACKAGE_VERSION = 'versionInfo'
+ PACKAGE_SUPPLIER = 'supplier'
+ FILES_ANALYZED = 'filesAnalyzed'
+ PACKAGE_VERIFICATION_CODE = 'packageVerificationCode'
+ PACKAGE_VERIFICATION_CODE_VALUE = 'packageVerificationCodeValue'
+ PACKAGE_EXTERNAL_REFS = 'externalRefs'
+ PACKAGE_EXTERNAL_REF_CATEGORY = 'referenceCategory'
+ PACKAGE_EXTERNAL_REF_TYPE = 'referenceType'
+ PACKAGE_EXTERNAL_REF_LOCATOR = 'referenceLocator'
+ PACKAGE_HAS_FILES = 'hasFiles'
+
+ # File
+ FILES = 'files'
+ FILE_NAME = 'fileName'
+ FILE_CHECKSUMS = 'checksums'
+
+ # Relationship
+ RELATIONSHIPS = 'relationships'
+ REL_ELEMENT_ID = 'spdxElementId'
+ REL_RELATED_ELEMENT_ID = 'relatedSpdxElement'
+ REL_TYPE = 'relationshipType'
+
+
+class JSONWriter:
+ @staticmethod
+ def marshal_doc_headers(sbom_doc):
+ headers = {
+ PropNames.SPDX_VERSION: SPDX_VER,
+ PropNames.DATA_LICENSE: DATA_LIC,
+ PropNames.SPDXID: sbom_doc.id,
+ PropNames.NAME: sbom_doc.name,
+ PropNames.DOCUMENT_NAMESPACE: sbom_doc.namespace,
+ PropNames.CREATION_INFO: {}
+ }
+ creators = [creator for creator in sbom_doc.creators]
+ headers[PropNames.CREATION_INFO][PropNames.CREATORS] = creators
+ headers[PropNames.CREATION_INFO][PropNames.CREATED] = sbom_doc.created
+ external_refs = []
+ for doc_ref in sbom_doc.external_refs:
+ checksum = doc_ref.checksum.split(': ')
+ external_refs.append({
+ PropNames.EXTERNAL_DOCUMENT_ID: f'{doc_ref.id}',
+ PropNames.EXTERNAL_DOCUMENT_URI: doc_ref.uri,
+ PropNames.EXTERNAL_DOCUMENT_CHECKSUM: {
+ PropNames.ALGORITHM: checksum[0],
+ PropNames.CHECKSUM_VALUE: checksum[1]
+ }
+ })
+ if external_refs:
+ headers[PropNames.EXTERNAL_DOCUMENT_REF] = external_refs
+ headers[PropNames.DOCUMENT_DESCRIBES] = [sbom_doc.describes]
+
+ return headers
+
+ @staticmethod
+ def marshal_packages(sbom_doc):
+ packages = []
+ for p in sbom_doc.packages:
+ package = {
+ PropNames.NAME: p.name,
+ PropNames.SPDXID: p.id,
+ PropNames.PACKAGE_DOWNLOAD_LOCATION: p.download_location if p.download_location else sbom_data.VALUE_NOASSERTION,
+ PropNames.FILES_ANALYZED: p.files_analyzed
+ }
+ if p.version:
+ package[PropNames.PACKAGE_VERSION] = p.version
+ if p.supplier:
+ package[PropNames.PACKAGE_SUPPLIER] = p.supplier
+ if p.verification_code:
+ package[PropNames.PACKAGE_VERIFICATION_CODE] = {
+ PropNames.PACKAGE_VERIFICATION_CODE_VALUE: p.verification_code
+ }
+ if p.external_refs:
+ package[PropNames.PACKAGE_EXTERNAL_REFS] = []
+ for ref in p.external_refs:
+ ext_ref = {
+ PropNames.PACKAGE_EXTERNAL_REF_CATEGORY: ref.category,
+ PropNames.PACKAGE_EXTERNAL_REF_TYPE: ref.type,
+ PropNames.PACKAGE_EXTERNAL_REF_LOCATOR: ref.locator,
+ }
+ package[PropNames.PACKAGE_EXTERNAL_REFS].append(ext_ref)
+ if p.file_ids:
+ package[PropNames.PACKAGE_HAS_FILES] = []
+ for file_id in p.file_ids:
+ package[PropNames.PACKAGE_HAS_FILES].append(file_id)
+
+ packages.append(package)
+
+ return {PropNames.PACKAGES: packages}
+
+ @staticmethod
+ def marshal_files(sbom_doc):
+ files = []
+ for f in sbom_doc.files:
+ file = {
+ PropNames.FILE_NAME: f.name,
+ PropNames.SPDXID: f.id
+ }
+ checksum = f.checksum.split(': ')
+ file[PropNames.FILE_CHECKSUMS] = [{
+ PropNames.ALGORITHM: checksum[0],
+ PropNames.CHECKSUM_VALUE: checksum[1],
+ }]
+ files.append(file)
+ return {PropNames.FILES: files}
+
+ @staticmethod
+ def marshal_relationships(sbom_doc):
+ relationships = []
+ sorted_rels = sorted(sbom_doc.relationships, key=lambda r: r.relationship + r.id2 + r.id1)
+ for r in sorted_rels:
+ rel = {
+ PropNames.REL_ELEMENT_ID: r.id1,
+ PropNames.REL_RELATED_ELEMENT_ID: r.id2,
+ PropNames.REL_TYPE: r.relationship,
+ }
+ relationships.append(rel)
+
+ return {PropNames.RELATIONSHIPS: relationships}
+
+ @staticmethod
+ def write(sbom_doc, file):
+ doc = {}
+ doc.update(JSONWriter.marshal_doc_headers(sbom_doc))
+ doc.update(JSONWriter.marshal_packages(sbom_doc))
+ doc.update(JSONWriter.marshal_files(sbom_doc))
+ doc.update(JSONWriter.marshal_relationships(sbom_doc))
+ file.write(json.dumps(doc, indent=4))
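
Both writers expect file checksums in the '<ALGORITHM>: <value>' form that generate-sbom.py's checksum() helper produces; JSONWriter splits on ': ' to fill the SPDX algorithm/checksumValue pair. A minimal sketch with placeholder names and namespace:

import io
import sbom_data
import sbom_writers

doc = sbom_data.Document(name='demo', namespace='https://example.com/spdx/demo',
                         created='2023-01-01T00:00:00Z', describes='SPDXRef-file1')
doc.files.append(sbom_data.File(id='SPDXRef-file1', name='/bin/demo',
                                checksum='SHA1: 11111'))

with io.StringIO() as out:
    sbom_writers.JSONWriter.write(doc, out)
    # The file entry carries {"algorithm": "SHA1", "checksumValue": "11111"}.
    print(out.getvalue())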
diff --git a/tools/sbom/sbom_writers_test.py b/tools/sbom/sbom_writers_test.py
new file mode 100644
index 0000000..361dae6
--- /dev/null
+++ b/tools/sbom/sbom_writers_test.py
@@ -0,0 +1,158 @@
+#!/usr/bin/env python3
+#
+# Copyright (C) 2023 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import io
+import pathlib
+import unittest
+import sbom_data
+import sbom_writers
+
+BUILD_FINGER_PRINT = 'build_finger_print'
+SUPPLIER_GOOGLE = 'Organization: Google'
+SUPPLIER_UPSTREAM = 'Organization: upstream'
+
+SPDXID_PREBUILT_PACKAGE1 = 'SPDXRef-PREBUILT-package1'
+SPDXID_SOURCE_PACKAGE1 = 'SPDXRef-SOURCE-package1'
+SPDXID_UPSTREAM_PACKAGE1 = 'SPDXRef-UPSTREAM-package1'
+
+SPDXID_FILE1 = 'SPDXRef-file1'
+SPDXID_FILE2 = 'SPDXRef-file2'
+SPDXID_FILE3 = 'SPDXRef-file3'
+
+
+class SBOMWritersTest(unittest.TestCase):
+
+ def setUp(self):
+ # SBOM of a product
+ self.sbom_doc = sbom_data.Document(name='test doc',
+ namespace='http://www.google.com/sbom/spdx/android',
+ creators=[SUPPLIER_GOOGLE],
+ created='2023-03-31T22:17:58Z',
+ describes=sbom_data.SPDXID_PRODUCT)
+ self.sbom_doc.add_external_ref(
+ sbom_data.DocumentExternalReference(id='DocumentRef-external_doc_ref',
+ uri='external_doc_uri',
+ checksum='SHA1: 1234567890'))
+ self.sbom_doc.add_package(
+ sbom_data.Package(id=sbom_data.SPDXID_PRODUCT,
+ name=sbom_data.PACKAGE_NAME_PRODUCT,
+ download_location=sbom_data.VALUE_NONE,
+ supplier=SUPPLIER_GOOGLE,
+ version=BUILD_FINGER_PRINT,
+ files_analyzed=True,
+ verification_code='123456',
+ file_ids=[SPDXID_FILE1, SPDXID_FILE2, SPDXID_FILE3]))
+
+ self.sbom_doc.add_package(
+ sbom_data.Package(id=sbom_data.SPDXID_PLATFORM,
+ name=sbom_data.PACKAGE_NAME_PLATFORM,
+ download_location=sbom_data.VALUE_NONE,
+ supplier=SUPPLIER_GOOGLE,
+ version=BUILD_FINGER_PRINT,
+ ))
+
+ self.sbom_doc.add_package(
+ sbom_data.Package(id=SPDXID_PREBUILT_PACKAGE1,
+ name='Prebuilt package1',
+ download_location=sbom_data.VALUE_NONE,
+ supplier=SUPPLIER_GOOGLE,
+ version=BUILD_FINGER_PRINT,
+ ))
+
+ self.sbom_doc.add_package(
+ sbom_data.Package(id=SPDXID_SOURCE_PACKAGE1,
+ name='Source package1',
+ download_location=sbom_data.VALUE_NONE,
+ supplier=SUPPLIER_GOOGLE,
+ version=BUILD_FINGER_PRINT,
+ external_refs=[sbom_data.PackageExternalRef(
+ category=sbom_data.PackageExternalRefCategory.SECURITY,
+ type=sbom_data.PackageExternalRefType.cpe22Type,
+ locator='cpe:/a:jsoncpp_project:jsoncpp:1.9.4')]
+ ))
+
+ self.sbom_doc.add_package(
+ sbom_data.Package(id=SPDXID_UPSTREAM_PACKAGE1,
+ name='Upstream package1',
+ supplier=SUPPLIER_UPSTREAM,
+ version='1.1',
+ ))
+
+ self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_SOURCE_PACKAGE1,
+ relationship=sbom_data.RelationshipType.VARIANT_OF,
+ id2=SPDXID_UPSTREAM_PACKAGE1))
+
+ self.sbom_doc.files.append(
+ sbom_data.File(id=SPDXID_FILE1, name='/bin/file1', checksum='SHA1: 11111'))
+ self.sbom_doc.files.append(
+ sbom_data.File(id=SPDXID_FILE2, name='/bin/file2', checksum='SHA1: 22222'))
+ self.sbom_doc.files.append(
+ sbom_data.File(id=SPDXID_FILE3, name='/bin/file3', checksum='SHA1: 33333'))
+
+ self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+ relationship=sbom_data.RelationshipType.GENERATED_FROM,
+ id2=sbom_data.SPDXID_PLATFORM))
+ self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE2,
+ relationship=sbom_data.RelationshipType.GENERATED_FROM,
+ id2=SPDXID_PREBUILT_PACKAGE1))
+ self.sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE3,
+ relationship=sbom_data.RelationshipType.GENERATED_FROM,
+ id2=SPDXID_SOURCE_PACKAGE1
+ ))
+
+ # SBOM fragment of an APK
+ self.unbundled_sbom_doc = sbom_data.Document(name='test doc',
+ namespace='http://www.google.com/sbom/spdx/android',
+ creators=[SUPPLIER_GOOGLE],
+ created='2023-03-31T22:17:58Z',
+ describes=SPDXID_FILE1)
+
+ self.unbundled_sbom_doc.files.append(
+ sbom_data.File(id=SPDXID_FILE1, name='/bin/file1.apk', checksum='SHA1: 11111'))
+ self.unbundled_sbom_doc.add_package(
+ sbom_data.Package(id=SPDXID_SOURCE_PACKAGE1,
+ name='Unbundled apk package',
+ download_location=sbom_data.VALUE_NONE,
+ supplier=SUPPLIER_GOOGLE,
+ version=BUILD_FINGER_PRINT))
+ self.unbundled_sbom_doc.add_relationship(sbom_data.Relationship(id1=SPDXID_FILE1,
+ relationship=sbom_data.RelationshipType.GENERATED_FROM,
+ id2=SPDXID_SOURCE_PACKAGE1))
+
+ def test_tagvalue_writer(self):
+ with io.StringIO() as output:
+ sbom_writers.TagValueWriter.write(self.sbom_doc, output)
+ expected_output = pathlib.Path('testdata/expected_tagvalue_sbom.spdx').read_text()
+ self.maxDiff = None
+ self.assertEqual(expected_output, output.getvalue())
+
+ def test_tagvalue_writer_unbundled(self):
+ with io.StringIO() as output:
+ sbom_writers.TagValueWriter.write(self.unbundled_sbom_doc, output, fragment=True)
+ expected_output = pathlib.Path('testdata/expected_tagvalue_sbom_unbundled.spdx').read_text()
+ self.maxDiff = None
+ self.assertEqual(expected_output, output.getvalue())
+
+ def test_json_writer(self):
+ with io.StringIO() as output:
+ sbom_writers.JSONWriter.write(self.sbom_doc, output)
+ expected_output = pathlib.Path('testdata/expected_json_sbom.spdx.json').read_text()
+ self.maxDiff = None
+ self.assertEqual(expected_output, output.getvalue())
+
+
+if __name__ == '__main__':
+ unittest.main(verbosity=2)
diff --git a/tools/sbom/testdata/expected_json_sbom.spdx.json b/tools/sbom/testdata/expected_json_sbom.spdx.json
new file mode 100644
index 0000000..32715a5
--- /dev/null
+++ b/tools/sbom/testdata/expected_json_sbom.spdx.json
@@ -0,0 +1,137 @@
+{
+ "spdxVersion": "SPDX-2.3",
+ "dataLicense": "CC0-1.0",
+ "SPDXID": "SPDXRef-DOCUMENT",
+ "name": "test doc",
+ "documentNamespace": "http://www.google.com/sbom/spdx/android",
+ "creationInfo": {
+ "creators": [
+ "Organization: Google"
+ ],
+ "created": "2023-03-31T22:17:58Z"
+ },
+ "externalDocumentRefs": [
+ {
+ "externalDocumentId": "DocumentRef-external_doc_ref",
+ "spdxDocument": "external_doc_uri",
+ "checksum": {
+ "algorithm": "SHA1",
+ "checksumValue": "1234567890"
+ }
+ }
+ ],
+ "documentDescribes": [
+ "SPDXRef-PRODUCT"
+ ],
+ "packages": [
+ {
+ "name": "PRODUCT",
+ "SPDXID": "SPDXRef-PRODUCT",
+ "downloadLocation": "NONE",
+ "filesAnalyzed": true,
+ "versionInfo": "build_finger_print",
+ "supplier": "Organization: Google",
+ "packageVerificationCode": {
+ "packageVerificationCodeValue": "123456"
+ },
+ "hasFiles": [
+ "SPDXRef-file1",
+ "SPDXRef-file2",
+ "SPDXRef-file3"
+ ]
+ },
+ {
+ "name": "PLATFORM",
+ "SPDXID": "SPDXRef-PLATFORM",
+ "downloadLocation": "NONE",
+ "filesAnalyzed": false,
+ "versionInfo": "build_finger_print",
+ "supplier": "Organization: Google"
+ },
+ {
+ "name": "Prebuilt package1",
+ "SPDXID": "SPDXRef-PREBUILT-package1",
+ "downloadLocation": "NONE",
+ "filesAnalyzed": false,
+ "versionInfo": "build_finger_print",
+ "supplier": "Organization: Google"
+ },
+ {
+ "name": "Source package1",
+ "SPDXID": "SPDXRef-SOURCE-package1",
+ "downloadLocation": "NONE",
+ "filesAnalyzed": false,
+ "versionInfo": "build_finger_print",
+ "supplier": "Organization: Google",
+ "externalRefs": [
+ {
+ "referenceCategory": "SECURITY",
+ "referenceType": "cpe22Type",
+ "referenceLocator": "cpe:/a:jsoncpp_project:jsoncpp:1.9.4"
+ }
+ ]
+ },
+ {
+ "name": "Upstream package1",
+ "SPDXID": "SPDXRef-UPSTREAM-package1",
+ "downloadLocation": "NOASSERTION",
+ "filesAnalyzed": false,
+ "versionInfo": "1.1",
+ "supplier": "Organization: upstream"
+ }
+ ],
+ "files": [
+ {
+ "fileName": "/bin/file1",
+ "SPDXID": "SPDXRef-file1",
+ "checksums": [
+ {
+ "algorithm": "SHA1",
+ "checksumValue": "11111"
+ }
+ ]
+ },
+ {
+ "fileName": "/bin/file2",
+ "SPDXID": "SPDXRef-file2",
+ "checksums": [
+ {
+ "algorithm": "SHA1",
+ "checksumValue": "22222"
+ }
+ ]
+ },
+ {
+ "fileName": "/bin/file3",
+ "SPDXID": "SPDXRef-file3",
+ "checksums": [
+ {
+ "algorithm": "SHA1",
+ "checksumValue": "33333"
+ }
+ ]
+ }
+ ],
+ "relationships": [
+ {
+ "spdxElementId": "SPDXRef-file1",
+ "relatedSpdxElement": "SPDXRef-PLATFORM",
+ "relationshipType": "GENERATED_FROM"
+ },
+ {
+ "spdxElementId": "SPDXRef-file2",
+ "relatedSpdxElement": "SPDXRef-PREBUILT-package1",
+ "relationshipType": "GENERATED_FROM"
+ },
+ {
+ "spdxElementId": "SPDXRef-file3",
+ "relatedSpdxElement": "SPDXRef-SOURCE-package1",
+ "relationshipType": "GENERATED_FROM"
+ },
+ {
+ "spdxElementId": "SPDXRef-SOURCE-package1",
+ "relatedSpdxElement": "SPDXRef-UPSTREAM-package1",
+ "relationshipType": "VARIANT_OF"
+ }
+ ]
+}
\ No newline at end of file
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom.spdx b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
new file mode 100644
index 0000000..ee39e82
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom.spdx
@@ -0,0 +1,65 @@
+SPDXVersion: SPDX-2.3
+DataLicense: CC0-1.0
+SPDXID: SPDXRef-DOCUMENT
+DocumentName: test doc
+DocumentNamespace: http://www.google.com/sbom/spdx/android
+Creator: Organization: Google
+Created: 2023-03-31T22:17:58Z
+ExternalDocumentRef: DocumentRef-external_doc_ref external_doc_uri SHA1: 1234567890
+
+PackageName: PRODUCT
+SPDXID: SPDXRef-PRODUCT
+PackageDownloadLocation: NONE
+FilesAnalyzed: true
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+PackageVerificationCode: 123456
+
+Relationship: SPDXRef-DOCUMENT DESCRIBES SPDXRef-PRODUCT
+
+FileName: /bin/file1
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+FileName: /bin/file2
+SPDXID: SPDXRef-file2
+FileChecksum: SHA1: 22222
+
+FileName: /bin/file3
+SPDXID: SPDXRef-file3
+FileChecksum: SHA1: 33333
+
+PackageName: PLATFORM
+SPDXID: SPDXRef-PLATFORM
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Prebuilt package1
+SPDXID: SPDXRef-PREBUILT-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+PackageName: Source package1
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+ExternalRef: SECURITY cpe22Type cpe:/a:jsoncpp_project:jsoncpp:1.9.4
+
+PackageName: Upstream package1
+SPDXID: SPDXRef-UPSTREAM-package1
+PackageDownloadLocation: NOASSERTION
+FilesAnalyzed: false
+PackageVersion: 1.1
+PackageSupplier: Organization: upstream
+
+Relationship: SPDXRef-SOURCE-package1 VARIANT_OF SPDXRef-UPSTREAM-package1
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-PLATFORM
+Relationship: SPDXRef-file2 GENERATED_FROM SPDXRef-PREBUILT-package1
+Relationship: SPDXRef-file3 GENERATED_FROM SPDXRef-SOURCE-package1
diff --git a/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
new file mode 100644
index 0000000..a00c291
--- /dev/null
+++ b/tools/sbom/testdata/expected_tagvalue_sbom_unbundled.spdx
@@ -0,0 +1,12 @@
+FileName: /bin/file1.apk
+SPDXID: SPDXRef-file1
+FileChecksum: SHA1: 11111
+
+PackageName: Unbundled apk package
+SPDXID: SPDXRef-SOURCE-package1
+PackageDownloadLocation: NONE
+FilesAnalyzed: false
+PackageVersion: build_finger_print
+PackageSupplier: Organization: Google
+
+Relationship: SPDXRef-file1 GENERATED_FROM SPDXRef-SOURCE-package1